lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
---|---|---|---|---|---|---|---|---|---|---|---|
Java | apache-2.0 | 4e7a62ab74f47c55667d597d5d173bbd7408cb6a | 0 | TelluIoT/ThingML,TelluIoT/ThingML,TelluIoT/ThingML,TelluIoT/ThingML,TelluIoT/ThingML,TelluIoT/ThingML | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*/
package org.thingml.compilers.utils;
import org.thingml.compilers.Context;
import org.thingml.compilers.thing.ThingActionCompiler;
import org.thingml.xtext.helpers.ToString;
import org.thingml.xtext.thingML.Action;
import org.thingml.xtext.thingML.ActionBlock;
import org.thingml.xtext.thingML.CastExpression;
import org.thingml.xtext.thingML.Expression;
import org.thingml.xtext.thingML.FunctionCallExpression;
import org.thingml.xtext.thingML.FunctionCallStatement;
import org.thingml.xtext.thingML.SendAction;
import org.thingml.xtext.thingML.StringLiteral;
/**
* Created by bmori on 01.12.2014.
*/
public class ThingMLPrettyPrinter extends ThingActionCompiler {

    public static boolean USE_ELLIPSIS_FOR_PARAMS = true;
    public static int MAX_BLOCK_SIZE = 3;
    public static boolean HIDE_BLOCKS = false;

    public final static String NEW_LINE = "\\n";
    public final static String INDENT = " "; //two blank spaces for indentation
    public static int indent_level = 0;

    //ThingML pretty printer (useful for documentation, etc)

    /**
     * Pretty-prints {@code raw} on a single line: trims surrounding whitespace
     * and escapes real line breaks as the literal two-character sequence "\n"
     * (the value of {@link #NEW_LINE}). Extracted here because this exact
     * escaping chain was duplicated in every generate(...) override.
     */
    private static String escape(String raw) {
        return raw.trim().replace("\r\n", "\\n").replace("\n", "\\n");
    }

    /** ACTIONS **/

    /**
     * Default behavior for any action. Override specific methods if needed to
     * define an alternative behavior.
     */
    @Override
    public void generate(Action action, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(action)));
        builder.append(NEW_LINE);
    }

    @Override
    public void generate(SendAction action, StringBuilder builder, Context ctx) {
        // Long parameter lists are elided as "..." to keep the output compact.
        if (USE_ELLIPSIS_FOR_PARAMS && action.getParameters().size() > 1) {
            builder.append(action.getPort().getName() + "!" + action.getMessage().getName() + "(");
            builder.append("...");
            builder.append(")" + NEW_LINE);
        } else {
            builder.append(escape(ToString.valueOf(action)));
            builder.append(NEW_LINE);
        }
    }

    @Override
    public void generate(ActionBlock action, StringBuilder builder, Context ctx) {
        StringBuilder temp = new StringBuilder();
        if (action.getActions().size() > 1)
            temp.append("do");
        if (!HIDE_BLOCKS) {
            temp.append("\\n");
        }
        indent_level++;
        if (HIDE_BLOCKS && action.getActions().size() > 1) {
            temp.append("...");
        } else {
            if (action.getActions().size() > MAX_BLOCK_SIZE) {
                // Oversized block: print the first MAX_BLOCK_SIZE/2 actions, an
                // ellipsis, then the tail of the list.
                int i = 0;
                for (Action a : action.getActions()) {
                    if (i < MAX_BLOCK_SIZE / 2) {
                        generate(a, temp, ctx);
                        i++;
                    } else {
                        break;
                    }
                }
                temp.append("..." + NEW_LINE);
                i = 0;
                for (Action a : action.getActions()) {
                    // NOTE(review): indices MAX_BLOCK_SIZE/2 .. MAX_BLOCK_SIZE/2 + 1
                    // are swallowed by the ellipsis — confirm this asymmetric
                    // boundary is intended (looks like a possible off-by-one).
                    if (i > MAX_BLOCK_SIZE / 2 + 1) {
                        generate(a, temp, ctx);
                    }
                    i++;
                }
            } else {
                for (Action a : action.getActions()) {
                    generate(a, temp, ctx);
                }
            }
        }
        indent_level--;
        if (action.getActions().size() > 1)
            temp.append("end");
        if (!HIDE_BLOCKS)
            temp.append("\n");
        // No trim here (unlike escape()): layout inside the block is significant.
        builder.append(temp.toString().replace("\r\n", "\\n").replace("\n", "\\n"));
    }

    @Override
    public void generate(FunctionCallStatement action, StringBuilder builder, Context ctx) {
        if (USE_ELLIPSIS_FOR_PARAMS && action.getParameters().size() > 1) {
            builder.append(action.getFunction().getName() + "(...)" + NEW_LINE);
        } else {
            builder.append(escape(ToString.valueOf(action)));
            builder.append(NEW_LINE);
        }
    }

    /** EXPRESSIONS **/

    /**
     * Default behavior for any expression. Override specific methods if needed
     * to define an alternative behavior.
     */
    @Override
    public void generate(Expression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(StringLiteral expression, StringBuilder builder, Context ctx) {
        // No trim: the literal's content is significant. The final replace
        // double-escapes every "\n" sequence (including those just produced by
        // the first two replaces) so it survives a later un-escaping pass.
        builder.append("\"" + expression.getStringValue().replace("\r\n", "\\n").replace("\n", "\\n").replace("\\n","\\\\n") + "\"");
    }

    @Override
    public void generate(FunctionCallExpression expression, StringBuilder builder, Context ctx) {
        if (USE_ELLIPSIS_FOR_PARAMS && expression.getParameters().size() > 1) {
            builder.append(expression.getFunction().getName() + "(...)");
        } else {
            builder.append(escape(ToString.valueOf(expression)));
        }
    }

    @Override
    public void generate(CastExpression expression, StringBuilder builder, Context ctx) {
        //We do not cast explicitly in the generated code. Should a cast be needed, it has to be done in an extern expression
        generate(expression.getTerm(), builder, ctx);
    }
}
| compilers/framework/src/main/java/org/thingml/compilers/utils/ThingMLPrettyPrinter.java | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*/
package org.thingml.compilers.utils;
import org.thingml.compilers.Context;
import org.thingml.compilers.thing.ThingActionCompiler;
import org.thingml.xtext.helpers.ToString;
import org.thingml.xtext.thingML.Action;
import org.thingml.xtext.thingML.ActionBlock;
import org.thingml.xtext.thingML.AndExpression;
import org.thingml.xtext.thingML.ArrayIndex;
import org.thingml.xtext.thingML.BooleanLiteral;
import org.thingml.xtext.thingML.ByteLiteral;
import org.thingml.xtext.thingML.CastExpression;
import org.thingml.xtext.thingML.CharLiteral;
import org.thingml.xtext.thingML.ConditionalAction;
import org.thingml.xtext.thingML.Decrement;
import org.thingml.xtext.thingML.DivExpression;
import org.thingml.xtext.thingML.DoubleLiteral;
import org.thingml.xtext.thingML.EnumLiteralRef;
import org.thingml.xtext.thingML.EqualsExpression;
import org.thingml.xtext.thingML.ErrorAction;
import org.thingml.xtext.thingML.EventReference;
import org.thingml.xtext.thingML.ExpressionGroup;
import org.thingml.xtext.thingML.ExternExpression;
import org.thingml.xtext.thingML.ExternStatement;
import org.thingml.xtext.thingML.ForAction;
import org.thingml.xtext.thingML.FunctionCallExpression;
import org.thingml.xtext.thingML.FunctionCallStatement;
import org.thingml.xtext.thingML.GreaterExpression;
import org.thingml.xtext.thingML.GreaterOrEqualExpression;
import org.thingml.xtext.thingML.Increment;
import org.thingml.xtext.thingML.IntegerLiteral;
import org.thingml.xtext.thingML.LocalVariable;
import org.thingml.xtext.thingML.LoopAction;
import org.thingml.xtext.thingML.LowerExpression;
import org.thingml.xtext.thingML.LowerOrEqualExpression;
import org.thingml.xtext.thingML.MinusExpression;
import org.thingml.xtext.thingML.ModExpression;
import org.thingml.xtext.thingML.NotEqualsExpression;
import org.thingml.xtext.thingML.NotExpression;
import org.thingml.xtext.thingML.OrExpression;
import org.thingml.xtext.thingML.PlusExpression;
import org.thingml.xtext.thingML.PrintAction;
import org.thingml.xtext.thingML.PropertyReference;
import org.thingml.xtext.thingML.ReturnAction;
import org.thingml.xtext.thingML.SendAction;
import org.thingml.xtext.thingML.StartSession;
import org.thingml.xtext.thingML.StringLiteral;
import org.thingml.xtext.thingML.TimesExpression;
import org.thingml.xtext.thingML.UnaryMinus;
import org.thingml.xtext.thingML.VariableAssignment;
/**
* Created by bmori on 01.12.2014.
*/
public class ThingMLPrettyPrinter extends ThingActionCompiler {

    public static boolean USE_ELLIPSIS_FOR_PARAMS = true;
    public static int MAX_BLOCK_SIZE = 3;
    public static boolean HIDE_BLOCKS = true;

    public final static String NEW_LINE = "\\n";
    public final static String INDENT = " "; //two blank spaces for indentation
    public static int indent_level = 0;

    //ThingML pretty printer (useful for documentation, etc)

    /**
     * Pretty-prints {@code raw} on a single line: trims surrounding whitespace
     * and escapes real line breaks as the literal two-character sequence "\n"
     * (the value of {@link #NEW_LINE}). Extracted here because this exact
     * escaping chain was duplicated in every generate(...) override below.
     */
    private static String escape(String raw) {
        return raw.trim().replace("\r\n", "\\n").replace("\n", "\\n");
    }

    /** Appends an escaped statement followed by the printer's NEW_LINE marker. */
    private static void appendStatement(String raw, StringBuilder builder) {
        builder.append(escape(raw));
        builder.append(NEW_LINE);
    }

    @Override
    public void generate(SendAction action, StringBuilder builder, Context ctx) {
        // Long parameter lists are elided as "..." to keep the output compact.
        if (USE_ELLIPSIS_FOR_PARAMS && action.getParameters().size() > 1) {
            builder.append(action.getPort().getName() + "!" + action.getMessage().getName() + "(");
            builder.append("...");
            builder.append(")" + NEW_LINE);
        } else {
            appendStatement(ToString.valueOf(action), builder);
        }
    }

    @Override
    public void generate(StartSession action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(VariableAssignment action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(ActionBlock action, StringBuilder builder, Context ctx) {
        StringBuilder temp = new StringBuilder();
        if (action.getActions().size() > 1)
            temp.append("do");
        if (!HIDE_BLOCKS) {
            temp.append("\\n");
        }
        indent_level++;
        if (HIDE_BLOCKS && action.getActions().size() > 1) {
            temp.append("...");
        } else {
            if (action.getActions().size() > MAX_BLOCK_SIZE) {
                // Oversized block: print the first MAX_BLOCK_SIZE/2 actions, an
                // ellipsis, then the tail of the list.
                int i = 0;
                for (Action a : action.getActions()) {
                    if (i < MAX_BLOCK_SIZE / 2) {
                        generate(a, temp, ctx);
                        i++;
                    } else {
                        break;
                    }
                }
                temp.append("..." + NEW_LINE);
                i = 0;
                for (Action a : action.getActions()) {
                    // NOTE(review): indices MAX_BLOCK_SIZE/2 .. MAX_BLOCK_SIZE/2 + 1
                    // are swallowed by the ellipsis — confirm this asymmetric
                    // boundary is intended (looks like a possible off-by-one).
                    if (i > MAX_BLOCK_SIZE / 2 + 1) {
                        generate(a, temp, ctx);
                    }
                    i++;
                }
            } else {
                for (Action a : action.getActions()) {
                    generate(a, temp, ctx);
                }
            }
        }
        indent_level--;
        if (action.getActions().size() > 1)
            temp.append("end");
        if (!HIDE_BLOCKS)
            temp.append("\n");
        // No trim here (unlike escape()): layout inside the block is significant.
        builder.append(temp.toString().replace("\r\n", "\\n").replace("\n", "\\n"));
    }

    @Override
    public void generate(ExternStatement action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(ConditionalAction action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(LoopAction action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(ForAction fa, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(fa), builder);
    }

    @Override
    public void generate(PrintAction action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(ErrorAction action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(ReturnAction action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(LocalVariable action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(FunctionCallStatement action, StringBuilder builder, Context ctx) {
        if (USE_ELLIPSIS_FOR_PARAMS && action.getParameters().size() > 1) {
            builder.append(action.getFunction().getName() + "(...)" + NEW_LINE);
        } else {
            appendStatement(ToString.valueOf(action), builder);
        }
    }

    //ThingML expressions that can be compiled the same way for any imperative language like (Java, JS, C)

    @Override
    public void generate(ArrayIndex expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(OrExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(AndExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(LowerExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(GreaterExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(LowerOrEqualExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(GreaterOrEqualExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(EqualsExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(NotEqualsExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(PlusExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(MinusExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(TimesExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(DivExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(ModExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(UnaryMinus expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(NotExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(PropertyReference expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(ByteLiteral expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(CharLiteral expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(IntegerLiteral expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(DoubleLiteral expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(StringLiteral expression, StringBuilder builder, Context ctx) {
        // No trim: the literal's content is significant. The final replace
        // double-escapes every "\n" sequence (including those just produced by
        // the first two replaces) so it survives a later un-escaping pass.
        builder.append("\"" + expression.getStringValue().replace("\r\n", "\\n").replace("\n", "\\n").replace("\\n","\\\\n") + "\"");
    }

    @Override
    public void generate(BooleanLiteral expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(EnumLiteralRef expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(ExternExpression expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(FunctionCallExpression expression, StringBuilder builder, Context ctx) {
        if (USE_ELLIPSIS_FOR_PARAMS && expression.getParameters().size() > 1) {
            builder.append(expression.getFunction().getName() + "(...)");
        } else {
            builder.append(escape(ToString.valueOf(expression)));
        }
    }

    @Override
    public void generate(Increment action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(Decrement action, StringBuilder builder, Context ctx) {
        appendStatement(ToString.valueOf(action), builder);
    }

    @Override
    public void generate(EventReference expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(ExpressionGroup expression, StringBuilder builder, Context ctx) {
        builder.append(escape(ToString.valueOf(expression)));
    }

    @Override
    public void generate(CastExpression expression, StringBuilder builder, Context ctx) {
        //We do not cast explicitly in the generated code. Should a cast be needed, it has to be done in an extern expression
        generate(expression.getTerm(), builder, ctx);
    }
}
| cleanup
| compilers/framework/src/main/java/org/thingml/compilers/utils/ThingMLPrettyPrinter.java | cleanup |
|
Java | apache-2.0 | 653f7aec1c787fbfef88d4b951f13e985f990299 | 0 | cristianonicolai/guvnor,hxf0801/guvnor,nmirasch/guvnor,mbiarnes/guvnor,adrielparedes/guvnor,baldimir/guvnor,hxf0801/guvnor,droolsjbpm/guvnor,porcelli-forks/guvnor,porcelli-forks/guvnor,droolsjbpm/guvnor,wmedvede/guvnor,etirelli/guvnor,baldimir/guvnor,yurloc/guvnor,etirelli/guvnor,psiroky/guvnor,cristianonicolai/guvnor,mswiderski/guvnor,cristianonicolai/guvnor,nmirasch/guvnor,mbiarnes/guvnor,porcelli-forks/guvnor,psiroky/guvnor,kiereleaseuser/guvnor,psiroky/guvnor,etirelli/guvnor,kiereleaseuser/guvnor,adrielparedes/guvnor,wmedvede/guvnor,droolsjbpm/guvnor,mbiarnes/guvnor,baldimir/guvnor,kiereleaseuser/guvnor,wmedvede/guvnor,nmirasch/guvnor,hxf0801/guvnor,yurloc/guvnor,adrielparedes/guvnor | /*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.server;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import javax.servlet.http.HttpServletResponse;
import org.drools.guvnor.client.rpc.AssetService;
import org.drools.guvnor.client.rpc.CategoryService;
import org.drools.guvnor.client.rpc.ConversionResult;
import org.drools.guvnor.client.rpc.Module;
import org.drools.guvnor.client.rpc.ModuleService;
import org.drools.guvnor.client.rpc.RepositoryService;
import org.drools.guvnor.client.rpc.SnapshotInfo;
import org.drools.guvnor.server.util.LoggingHelper;
import org.drools.ide.common.client.modeldriven.testing.Scenario;
import org.drools.ide.common.shared.workitems.PortableWorkDefinition;
import org.drools.repository.RulesRepositoryException;
import org.jboss.seam.security.AuthorizationException;
import org.jboss.seam.security.NotLoggedInException;
import com.google.gwt.user.client.rpc.SerializationException;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
/**
* GWT RPC service endpoint for Repository service. A place to hang some exception handling mainly.
* This passes on all requests unmolested to the underlying ServiceImplemention class.
*/
public class RepositoryServiceServlet extends RemoteServiceServlet
implements
RepositoryService,
AssetService,
ModuleService,
CategoryService {
private static final long serialVersionUID = 495822L;
private static final LoggingHelper log = LoggingHelper.getLogger( RepositoryServiceServlet.class );
@Inject
private ServiceImplementation serviceImplementation;
@Inject
private RepositoryAssetService assetService;
@Inject
private RepositoryModuleService moduleService;
@Inject
private RepositoryCategoryService categoryService;
@Override
protected void doUnexpectedFailure(Throwable e) {
if ( e.getCause() instanceof AuthorizationException ) {
HttpServletResponse response = getThreadLocalResponse();
PrintWriter writer = null;
try {
writer = response.getWriter();
log.error( e.getMessage(),
e.getCause() );
e.printStackTrace();
response.setContentType( "text/plain" );
response.setStatus( HttpServletResponse.SC_UNAUTHORIZED );
writer.write( "Sorry, insufficient permissions to perform this action." );
} catch (IOException ex) {
getServletContext().log( "respondWithUnexpectedFailure failed while sending the previous failure to the client",
ex );
} finally {
close( writer );
}
} else if ( e.getCause() instanceof RulesRepositoryException ) {
log.error( e.getMessage(),
e.getCause() );
sendErrorMessage( e.getCause().getMessage() );
} else if ( e.getCause() instanceof NotLoggedInException ) {
log.error( e.getMessage(),
e.getCause() );
sendErrorMessage( "You are not logged in. Please refresh your browser and try again." );
} else {
if ( e.getCause() != null ) {
log.error( e.getMessage(),
e.getCause() );
} else {
log.error( e.getMessage(),
e );
}
sendErrorMessage( "Sorry, a technical error occurred. Please contact a system administrator." );
}
}
private void close(PrintWriter writer) {
if ( writer != null ) {
writer.flush();
writer.close();
}
}
    /**
     * Writes {@code msg} to the client as a plain-text HTTP 400 (Bad Request)
     * response. An {@link IOException} while writing is logged to the servlet
     * context rather than rethrown, so this method never propagates a failure.
     */
    private void sendErrorMessage(String msg) {
        HttpServletResponse response = getThreadLocalResponse();
        response.setContentType( "text/plain" );
        response.setStatus( HttpServletResponse.SC_BAD_REQUEST );
        PrintWriter writer = null;
        try {
            writer = response.getWriter();
            writer.write( msg );
        } catch (IOException ex) {
            getServletContext().log( "respondWithUnexpectedFailure failed while sending the previous failure to the client",
                                     ex );
        } finally {
            // Always flush/close whatever was obtained from the response.
            close( writer );
        }
    }
    /**
     * Category service delegation — these methods forward unmodified to
     * {@link RepositoryCategoryService}; no additional logic is applied here.
     */
    public java.lang.String[] loadChildCategories(java.lang.String p0) {
        return categoryService.loadChildCategories( p0 );
    }

    public org.drools.guvnor.client.rpc.TableDataResult loadRuleListForCategories(java.lang.String p0, int p1, int p2, java.lang.String p3) throws com.google.gwt.user.client.rpc.SerializationException {
        return categoryService.loadRuleListForCategories( p0, p1, p2, p3 );
    }

    public org.drools.guvnor.client.rpc.PageResponse loadRuleListForCategories(org.drools.guvnor.client.rpc.CategoryPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return categoryService.loadRuleListForCategories( p0 );
    }

    public java.lang.Boolean createCategory(java.lang.String p0, java.lang.String p1, java.lang.String p2) {
        return categoryService.createCategory( p0, p1, p2 );
    }

    public void removeCategory(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        categoryService.removeCategory( p0 );
    }

    public void renameCategory(java.lang.String p0, java.lang.String p1) {
        categoryService.renameCategory( p0, p1 );
    }
    /**
     * Module service delegation — these methods forward unmodified to
     * {@link RepositoryModuleService}; no additional logic is applied here.
     */
    public org.drools.guvnor.client.rpc.Module[] listModules(java.lang.String p0) {
        return moduleService.listModules( p0 );
    }

    public org.drools.guvnor.client.rpc.Module[] listModules() {
        return moduleService.listModules();
    }

    public org.drools.guvnor.client.rpc.Module[] listArchivedModules() {
        return moduleService.listArchivedModules();
    }

    public org.drools.guvnor.client.rpc.Module loadGlobalModule() {
        return moduleService.loadGlobalModule();
    }

    public SnapshotInfo loadSnapshotInfo(String moduleName, String snapshotName) {
        return moduleService.loadSnapshotInfo( moduleName, snapshotName );
    }

    public java.lang.String createModule(java.lang.String p0, java.lang.String p1, java.lang.String p2) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.createModule( p0, p1, p2 );
    }

    public java.lang.String createSubModule(java.lang.String p0, java.lang.String p1, java.lang.String p2) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.createSubModule( p0, p1, p2 );
    }

    public org.drools.guvnor.client.rpc.Module loadModule(java.lang.String p0) {
        return moduleService.loadModule( p0 );
    }

    public org.drools.guvnor.client.rpc.ValidatedResponse validateModule(org.drools.guvnor.client.rpc.Module p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.validateModule( p0 );
    }

    public void saveModule(org.drools.guvnor.client.rpc.Module p0) throws com.google.gwt.user.client.rpc.SerializationException {
        moduleService.saveModule( p0 );
    }

    public void createModuleSnapshot(java.lang.String p0, java.lang.String p1, boolean p2, java.lang.String p3) {
        moduleService.createModuleSnapshot( p0, p1, p2, p3 );
    }

    public void copyOrRemoveSnapshot(java.lang.String p0, java.lang.String p1, boolean p2, java.lang.String p3) throws com.google.gwt.user.client.rpc.SerializationException {
        moduleService.copyOrRemoveSnapshot( p0, p1, p2, p3 );
    }

    public org.drools.guvnor.client.rpc.BuilderResult buildPackage(java.lang.String p0, boolean p1, java.lang.String p2, java.lang.String p3, java.lang.String p4, boolean p5, java.lang.String p6, java.lang.String p7, boolean p8, java.lang.String p9) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.buildPackage( p0, p1, p2, p3, p4, p5, p6, p7, p8, p9 );
    }

    public java.lang.String buildModuleSource(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.buildModuleSource( p0 );
    }

    public String copyModule(java.lang.String p0, java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.copyModule( p0, p1 );
    }

    public void removeModule(java.lang.String p0) {
        moduleService.removeModule( p0 );
    }

    public java.lang.String renameModule(java.lang.String p0, java.lang.String p1) {
        return moduleService.renameModule( p0, p1 );
    }

    public void rebuildSnapshots() throws com.google.gwt.user.client.rpc.SerializationException {
        moduleService.rebuildSnapshots();
    }

    public void rebuildPackages() throws com.google.gwt.user.client.rpc.SerializationException {
        moduleService.rebuildPackages();
    }

    public java.lang.String[] listRulesInPackage(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.listRulesInPackage( p0 );
    }

    public java.lang.String[] listImagesInModule(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.listImagesInModule( p0 );
    }

    public org.drools.guvnor.client.rpc.SnapshotInfo[] listSnapshots(java.lang.String p0) {
        return moduleService.listSnapshots( p0 );
    }

    public java.lang.String[] listTypesInPackage(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.listTypesInPackage( p0 );
    }

    public void installSampleRepository() throws com.google.gwt.user.client.rpc.SerializationException {
        moduleService.installSampleRepository();
    }

    public org.drools.guvnor.client.rpc.SnapshotDiffs compareSnapshots(java.lang.String p0, java.lang.String p1, java.lang.String p2) {
        return moduleService.compareSnapshots( p0, p1, p2 );
    }

    public org.drools.guvnor.client.rpc.SnapshotComparisonPageResponse compareSnapshots(org.drools.guvnor.client.rpc.SnapshotComparisonPageRequest p0) {
        return moduleService.compareSnapshots( p0 );
    }

    public org.drools.guvnor.client.rpc.SingleScenarioResult runScenario(java.lang.String p0, Scenario p1) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.runScenario( p0, p1 );
    }

    public org.drools.guvnor.client.rpc.BulkTestRunResult runScenariosInPackage(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return moduleService.runScenariosInPackage( p0 );
    }
    /**
     * Asset service delegation — these methods forward unmodified to
     * {@link RepositoryAssetService}; no additional logic is applied here.
     */
    public org.drools.guvnor.client.rpc.PageResponse quickFindAsset(org.drools.guvnor.client.rpc.QueryPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.quickFindAsset( p0 );
    }

    public org.drools.guvnor.client.rpc.TableDataResult quickFindAsset(java.lang.String p0, boolean p1, int p2, int p3) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.quickFindAsset( p0, p1, p2, p3 );
    }

    public org.drools.guvnor.client.rpc.TableDataResult queryFullText(java.lang.String p0, boolean p1, int p2, int p3) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.queryFullText( p0, p1, p2, p3 );
    }

    public java.lang.String getAssetLockerUserName(java.lang.String p0) {
        return assetService.getAssetLockerUserName( p0 );
    }

    public void lockAsset(java.lang.String p0) {
        assetService.lockAsset( p0 );
    }

    public void unLockAsset(java.lang.String p0) {
        assetService.unLockAsset( p0 );
    }

    public void archiveAsset(java.lang.String p0) {
        assetService.archiveAsset( p0 );
    }

    public void unArchiveAsset(java.lang.String p0) {
        assetService.unArchiveAsset( p0 );
    }

    public void archiveAssets(java.lang.String[] p0, boolean p1) {
        assetService.archiveAssets( p0, p1 );
    }

    public void removeAsset(java.lang.String p0) {
        assetService.removeAsset( p0 );
    }

    public void removeAssets(java.lang.String[] p0) {
        assetService.removeAssets( p0 );
    }

    public java.lang.String buildAssetSource(org.drools.guvnor.client.rpc.Asset p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.buildAssetSource( p0 );
    }

    public org.drools.guvnor.client.rpc.BuilderResult validateAsset(org.drools.guvnor.client.rpc.Asset p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.validateAsset( p0 );
    }

    public java.lang.String renameAsset(java.lang.String p0, java.lang.String p1) {
        return assetService.renameAsset( p0, p1 );
    }

    public org.drools.guvnor.client.rpc.Asset loadRuleAsset(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.loadRuleAsset( p0 );
    }

    public org.drools.guvnor.client.rpc.Asset[] loadRuleAssets(java.lang.String[] p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.loadRuleAssets( p0 );
    }

    public org.drools.guvnor.client.rpc.TableDataResult loadAssetHistory(java.lang.String p0, java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.loadAssetHistory( p0, p1 );
    }

    public org.drools.guvnor.client.rpc.TableDataResult loadItemHistory(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.loadItemHistory( p0 );
    }

    public org.drools.guvnor.client.rpc.PageResponse loadArchivedAssets(org.drools.guvnor.client.rpc.PageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.loadArchivedAssets( p0 );
    }

    public org.drools.guvnor.client.rpc.TableDataResult loadArchivedAssets(int p0, int p1) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.loadArchivedAssets( p0, p1 );
    }

    public org.drools.guvnor.client.rpc.PageResponse findAssetPage(org.drools.guvnor.client.rpc.AssetPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
        return assetService.findAssetPage( p0 );
    }
public org.drools.guvnor.client.rpc.TableDataResult listAssets(java.lang.String p0,
java.lang.String[] p1,
int p2,
int p3,
java.lang.String p4) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.listAssets( p0,
p1,
p2,
p3,
p4 );
}
public org.drools.guvnor.client.rpc.TableDataResult listAssetsWithPackageName(java.lang.String p0,
java.lang.String[] p1,
int p2,
int p3,
java.lang.String p4) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.listAssetsWithPackageName( p0,
p1,
p2,
p3,
p4 );
}
public java.lang.String copyAsset(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) {
return assetService.copyAsset( p0,
p1,
p2 );
}
public void promoteAssetToGlobalArea(java.lang.String p0) {
assetService.promoteAssetToGlobalArea( p0 );
}
public void changeAssetPackage(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) {
assetService.changeAssetPackage( p0,
p1,
p2 );
}
public void changeState(java.lang.String p0,
java.lang.String p1) {
assetService.changeState( p0,
p1 );
}
public void changePackageState(java.lang.String p0,
java.lang.String p1) {
assetService.changePackageState( p0,
p1 );
}
public java.util.List loadDiscussionForAsset(java.lang.String p0) {
return assetService.loadDiscussionForAsset( p0 );
}
public java.util.List addToDiscussionForAsset(java.lang.String p0,
java.lang.String p1) {
return assetService.addToDiscussionForAsset( p0,
p1 );
}
public void clearAllDiscussionsForAsset(java.lang.String p0) {
assetService.clearAllDiscussionsForAsset( p0 );
}
public long getAssetCount(org.drools.guvnor.client.rpc.AssetPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.findAssetPage( p0 ).getTotalRowSize();
}
/**
* PLACE THE FOLLOWING IN RepositoryServiceServlet.java *
*/
public org.drools.guvnor.client.rpc.PageResponse loadRuleListForState(org.drools.guvnor.client.rpc.StatePageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.loadRuleListForState( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult loadRuleListForState(java.lang.String p0,
int p1,
int p2,
java.lang.String p3) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.loadRuleListForState( p0,
p1,
p2,
p3 );
}
public org.drools.guvnor.client.rpc.TableConfig loadTableConfig(java.lang.String p0) {
return serviceImplementation.loadTableConfig( p0 );
}
public java.lang.String createNewRule(java.lang.String p0,
java.lang.String p1,
java.lang.String p2,
java.lang.String p3,
java.lang.String p4) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewRule( p0,
p1,
p2,
p3,
p4 );
}
public String createNewRule(org.drools.guvnor.client.rpc.NewAssetConfiguration p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewRule( p0 );
}
public String createNewRule(org.drools.guvnor.client.rpc.NewGuidedDecisionTableAssetConfiguration p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewRule( p0 );
}
public java.lang.String createNewImportedRule(java.lang.String p0,
java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewImportedRule( p0,
p1 );
}
public void deleteUncheckedRule(java.lang.String p0) {
serviceImplementation.deleteUncheckedRule( p0 );
}
public void clearRulesRepository() {
serviceImplementation.clearRulesRepository();
}
public java.lang.String[] listWorkspaces() {
return serviceImplementation.listWorkspaces();
}
public void createWorkspace(java.lang.String p0) {
serviceImplementation.createWorkspace( p0 );
}
public void removeWorkspace(java.lang.String p0) {
serviceImplementation.removeWorkspace( p0 );
}
public void updateWorkspace(java.lang.String p0,
java.lang.String[] p1,
java.lang.String[] p2) {
serviceImplementation.updateWorkspace( p0,
p1,
p2 );
}
public void updateDependency(java.lang.String p0,
java.lang.String p1) {
moduleService.updateDependency( p0,
p1 );
}
public java.lang.String[] getDependencies(java.lang.String p0) {
return moduleService.getDependencies( p0 );
}
public java.lang.String checkinVersion(org.drools.guvnor.client.rpc.Asset p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.checkinVersion( p0 );
}
public void restoreVersion(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) {
assetService.restoreVersion( p0,
p1,
p2 );
}
public java.lang.String[] listStates() throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.listStates();
}
public java.lang.String createState(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createState( p0 );
}
public void renameState(java.lang.String p0,
java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
serviceImplementation.renameState( p0,
p1 );
}
public void removeState(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
serviceImplementation.removeState( p0 );
}
public org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine loadSuggestionCompletionEngine(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.loadSuggestionCompletionEngine( p0 );
}
public java.lang.String[] getCustomSelectors() throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.getCustomSelectors();
}
public org.drools.guvnor.client.rpc.PageResponse showLog(org.drools.guvnor.client.rpc.PageRequest p0) {
return serviceImplementation.showLog( p0 );
}
public org.drools.guvnor.client.rpc.LogEntry[] showLog() {
return serviceImplementation.showLog();
}
public void cleanLog() {
serviceImplementation.cleanLog();
}
public java.lang.String[] loadDropDownExpression(java.lang.String[] p0,
java.lang.String p1) {
return serviceImplementation.loadDropDownExpression( p0,
p1 );
}
public org.drools.guvnor.client.rpc.PageResponse queryFullText(org.drools.guvnor.client.rpc.QueryPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.queryFullText( p0 );
}
public org.drools.guvnor.client.rpc.PageResponse queryMetaData(org.drools.guvnor.client.rpc.QueryMetadataPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.queryMetaData( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult queryMetaData(org.drools.guvnor.client.rpc.MetaDataQuery[] p0,
java.util.Date p1,
java.util.Date p2,
java.util.Date p3,
java.util.Date p4,
boolean p5,
int p6,
int p7) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.queryMetaData( p0,
p1,
p2,
p3,
p4,
p5,
p6,
p7 );
}
public org.drools.guvnor.client.rpc.PageResponse listUserPermissions(org.drools.guvnor.client.rpc.PageRequest p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
return serviceImplementation.listUserPermissions( p0 );
}
public java.util.Map listUserPermissions() throws org.drools.guvnor.client.rpc.DetailedSerializationException {
return serviceImplementation.listUserPermissions();
}
public java.util.Map retrieveUserPermissions(java.lang.String p0) {
return serviceImplementation.retrieveUserPermissions( p0 );
}
public void updateUserPermissions(java.lang.String p0,
java.util.Map p1) {
serviceImplementation.updateUserPermissions( p0,
p1 );
}
public java.lang.String[] listAvailablePermissionTypes() {
return serviceImplementation.listAvailablePermissionTypes();
}
public List<String> listAvailablePermissionRoleTypes() {
return serviceImplementation.listAvailablePermissionRoleTypes();
}
/**
 * The sample repository should be skipped when the repository already
 * holds more than one module, or when the "do not install sample" flag
 * has been explicitly set.
 */
public boolean isDoNotInstallSample() {
    if ( moduleService.listModules().length != 1 ) {
        return true;
    }
    return serviceImplementation.isDoNotInstallSample();
}
public void setDoNotInstallSample() {
serviceImplementation.setDoNotInstallSample();
}
public void deleteUser(java.lang.String p0) {
serviceImplementation.deleteUser( p0 );
}
public void createUser(java.lang.String p0) {
serviceImplementation.createUser( p0 );
}
public java.util.List subscribe() {
return serviceImplementation.subscribe();
}
public org.drools.guvnor.client.rpc.TableDataResult loadInbox(java.lang.String p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
return serviceImplementation.loadInbox( p0 );
}
public org.drools.guvnor.client.rpc.PageResponse loadInbox(org.drools.guvnor.client.rpc.InboxPageRequest p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
return serviceImplementation.loadInbox( p0 );
}
public java.lang.String processTemplate(java.lang.String p0,
java.util.Map p1) {
return serviceImplementation.processTemplate( p0,
p1 );
}
public java.util.Map loadSpringContextElementData() throws org.drools.guvnor.client.rpc.DetailedSerializationException {
return serviceImplementation.loadSpringContextElementData();
}
public java.util.Map loadWorkitemDefinitionElementData() throws org.drools.guvnor.client.rpc.DetailedSerializationException {
return serviceImplementation.loadWorkitemDefinitionElementData();
}
public Set<PortableWorkDefinition> loadWorkItemDefinitions(String p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
return serviceImplementation.loadWorkItemDefinitions( p0 );
}
public boolean doesAssetExistInModule(java.lang.String p0,
java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.doesAssetExistInModule( p0,
p1 );
}
public ConversionResult convertAsset(String uuid,
String targetFormat) throws SerializationException {
return assetService.convertAsset( uuid,
targetFormat );
}
}
}
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.guvnor.server;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import javax.servlet.http.HttpServletResponse;
import org.drools.guvnor.client.rpc.AssetService;
import org.drools.guvnor.client.rpc.CategoryService;
import org.drools.guvnor.client.rpc.ConversionResult;
import org.drools.guvnor.client.rpc.Module;
import org.drools.guvnor.client.rpc.ModuleService;
import org.drools.guvnor.client.rpc.RepositoryService;
import org.drools.guvnor.client.rpc.SnapshotInfo;
import org.drools.guvnor.server.util.LoggingHelper;
import org.drools.ide.common.client.modeldriven.testing.Scenario;
import org.drools.ide.common.shared.workitems.PortableWorkDefinition;
import org.drools.repository.RulesRepositoryException;
import org.jboss.seam.security.AuthorizationException;
import com.google.gwt.user.client.rpc.SerializationException;
import com.google.gwt.user.server.rpc.RemoteServiceServlet;
/**
* GWT RPC service endpoint for Repository service. A place to hang some exception handling mainly.
* This passes on all requests unmolested to the underlying ServiceImplemention class.
*/
public class RepositoryServiceServlet extends RemoteServiceServlet
implements
RepositoryService,
AssetService,
ModuleService,
CategoryService {
// Serialization id — RemoteServiceServlet is Serializable, so pin the form.
private static final long serialVersionUID = 495822L;
// Shared logger used by the failure handler below to report RPC errors.
private static final LoggingHelper log = LoggingHelper.getLogger( RepositoryServiceServlet.class );
// CDI-injected backing services; every RPC method in this servlet is a
// thin pass-through to exactly one of these four implementations.
@Inject
private ServiceImplementation serviceImplementation;
@Inject
private RepositoryAssetService assetService;
@Inject
private RepositoryModuleService moduleService;
@Inject
private RepositoryCategoryService categoryService;
/**
 * Central error handler for all GWT-RPC calls dispatched by this servlet.
 * <p>
 * Authorization failures are answered with HTTP 401 and a short message;
 * repository exceptions are relayed to the client with their own message;
 * anything else is logged and answered with a generic technical-error text.
 *
 * @param e the failure raised while invoking the RPC method; its cause, when
 *          present, identifies the real error
 */
@Override
protected void doUnexpectedFailure(Throwable e) {
    if ( e.getCause() instanceof AuthorizationException ) {
        HttpServletResponse response = getThreadLocalResponse();
        PrintWriter writer = null;
        try {
            log.error( e.getMessage(),
                       e.getCause() );
            // Headers must be set before getWriter() so the declared
            // content type (and its character encoding) actually applies.
            // The raw printStackTrace() call that used to sit here was
            // removed: the stack trace is already captured by log.error.
            response.setContentType( "text/plain" );
            response.setStatus( HttpServletResponse.SC_UNAUTHORIZED );
            writer = response.getWriter();
            writer.write( "Sorry, insufficient permissions to perform this action." );
        } catch (IOException ex) {
            getServletContext().log( "respondWithUnexpectedFailure failed while sending the previous failure to the client",
                                     ex );
        } finally {
            close( writer );
        }
    } else if ( e.getCause() instanceof RulesRepositoryException ) {
        log.error( e.getMessage(),
                   e.getCause() );
        sendErrorMessage( e.getCause().getMessage() );
    } else {
        // Prefer the cause when one exists — it carries the real stack trace.
        if ( e.getCause() != null ) {
            log.error( e.getMessage(),
                       e.getCause() );
        } else {
            log.error( e.getMessage(),
                       e );
        }
        sendErrorMessage( "Sorry, a technical error occurred. Please contact a system administrator." );
    }
}
/**
 * Flushes and closes the given writer, tolerating a null reference
 * (the writer may never have been obtained if getWriter() failed).
 */
private void close(PrintWriter writer) {
    if ( writer == null ) {
        return;
    }
    writer.flush();
    writer.close();
}
/**
 * Replies to the current request with HTTP 400 and the given plain-text
 * message. Any I/O failure while writing is logged to the servlet context.
 */
private void sendErrorMessage(String msg) {
    HttpServletResponse response = getThreadLocalResponse();
    response.setContentType( "text/plain" );
    response.setStatus( HttpServletResponse.SC_BAD_REQUEST );
    PrintWriter writer = null;
    try {
        writer = response.getWriter();
        writer.write( msg );
    } catch (IOException ex) {
        getServletContext().log( "respondWithUnexpectedFailure failed while sending the previous failure to the client", ex );
    } finally {
        close( writer );
    }
}
/*
 * CategoryService API — every call below is forwarded verbatim to the
 * injected RepositoryCategoryService.
 */
public java.lang.String[] loadChildCategories(java.lang.String p0) {
    return categoryService.loadChildCategories(p0);
}

public org.drools.guvnor.client.rpc.TableDataResult loadRuleListForCategories(java.lang.String p0, int p1, int p2, java.lang.String p3) throws com.google.gwt.user.client.rpc.SerializationException {
    return categoryService.loadRuleListForCategories(p0, p1, p2, p3);
}

public org.drools.guvnor.client.rpc.PageResponse loadRuleListForCategories(org.drools.guvnor.client.rpc.CategoryPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
    return categoryService.loadRuleListForCategories(p0);
}

public java.lang.Boolean createCategory(java.lang.String p0, java.lang.String p1, java.lang.String p2) {
    return categoryService.createCategory(p0, p1, p2);
}

public void removeCategory(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
    categoryService.removeCategory(p0);
}

public void renameCategory(java.lang.String p0, java.lang.String p1) {
    categoryService.renameCategory(p0, p1);
}
/*
 * ModuleService API — every call below is forwarded verbatim to the
 * injected RepositoryModuleService.
 */
public org.drools.guvnor.client.rpc.Module[] listModules(java.lang.String p0) {
    return moduleService.listModules(p0);
}

public org.drools.guvnor.client.rpc.Module[] listModules() {
    return moduleService.listModules();
}

public org.drools.guvnor.client.rpc.Module[] listArchivedModules() {
    return moduleService.listArchivedModules();
}

public org.drools.guvnor.client.rpc.Module loadGlobalModule() {
    return moduleService.loadGlobalModule();
}

public SnapshotInfo loadSnapshotInfo(String moduleName, String snapshotName) {
    return moduleService.loadSnapshotInfo(moduleName, snapshotName);
}
public java.lang.String createModule(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.createModule( p0,
p1, p2);
}
public java.lang.String createSubModule(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.createSubModule( p0,
p1,
p2 );
}
public org.drools.guvnor.client.rpc.Module loadModule(java.lang.String p0) {
return moduleService.loadModule( p0 );
}
public org.drools.guvnor.client.rpc.ValidatedResponse validateModule(org.drools.guvnor.client.rpc.Module p0) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.validateModule( p0 );
}
public void saveModule(org.drools.guvnor.client.rpc.Module p0) throws com.google.gwt.user.client.rpc.SerializationException {
moduleService.saveModule( p0 );
}
public void createModuleSnapshot(java.lang.String p0,
java.lang.String p1,
boolean p2,
java.lang.String p3) {
moduleService.createModuleSnapshot( p0,
p1,
p2,
p3 );
}
public void copyOrRemoveSnapshot(java.lang.String p0,
java.lang.String p1,
boolean p2,
java.lang.String p3) throws com.google.gwt.user.client.rpc.SerializationException {
moduleService.copyOrRemoveSnapshot( p0,
p1,
p2,
p3 );
}
public org.drools.guvnor.client.rpc.BuilderResult buildPackage(java.lang.String p0,
boolean p1,
java.lang.String p2,
java.lang.String p3,
java.lang.String p4,
boolean p5,
java.lang.String p6,
java.lang.String p7,
boolean p8,
java.lang.String p9) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.buildPackage( p0,
p1,
p2,
p3,
p4,
p5,
p6,
p7,
p8,
p9 );
}
public java.lang.String buildModuleSource(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.buildModuleSource( p0 );
}
public String copyModule(java.lang.String p0,
java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.copyModule( p0,
p1 );
}
public void removeModule(java.lang.String p0) {
moduleService.removeModule( p0 );
}
public java.lang.String renameModule(java.lang.String p0,
java.lang.String p1) {
return moduleService.renameModule( p0,
p1 );
}
public void rebuildSnapshots() throws com.google.gwt.user.client.rpc.SerializationException {
moduleService.rebuildSnapshots();
}
public void rebuildPackages() throws com.google.gwt.user.client.rpc.SerializationException {
moduleService.rebuildPackages();
}
public java.lang.String[] listRulesInPackage(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.listRulesInPackage( p0 );
}
public java.lang.String[] listImagesInModule(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.listImagesInModule( p0 );
}
public org.drools.guvnor.client.rpc.SnapshotInfo[] listSnapshots(java.lang.String p0) {
return moduleService.listSnapshots( p0 );
}
public java.lang.String[] listTypesInPackage(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.listTypesInPackage( p0 );
}
public void installSampleRepository() throws com.google.gwt.user.client.rpc.SerializationException {
moduleService.installSampleRepository();
}
public org.drools.guvnor.client.rpc.SnapshotDiffs compareSnapshots(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) {
return moduleService.compareSnapshots( p0,
p1,
p2 );
}
public org.drools.guvnor.client.rpc.SnapshotComparisonPageResponse compareSnapshots(org.drools.guvnor.client.rpc.SnapshotComparisonPageRequest p0) {
return moduleService.compareSnapshots( p0 );
}
public org.drools.guvnor.client.rpc.SingleScenarioResult runScenario(java.lang.String p0,
Scenario p1) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.runScenario( p0,
p1 );
}
public org.drools.guvnor.client.rpc.BulkTestRunResult runScenariosInPackage(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return moduleService.runScenariosInPackage( p0 );
}
/**
* PLACE THE FOLLOWING IN RepositoryServiceServlet.java *
*/
public org.drools.guvnor.client.rpc.PageResponse quickFindAsset(org.drools.guvnor.client.rpc.QueryPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.quickFindAsset( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult quickFindAsset(java.lang.String p0,
boolean p1,
int p2,
int p3) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.quickFindAsset( p0,
p1,
p2,
p3 );
}
public org.drools.guvnor.client.rpc.TableDataResult queryFullText(java.lang.String p0,
boolean p1,
int p2,
int p3) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.queryFullText( p0,
p1,
p2,
p3 );
}
public java.lang.String getAssetLockerUserName(java.lang.String p0) {
return assetService.getAssetLockerUserName( p0 );
}
public void lockAsset(java.lang.String p0) {
assetService.lockAsset( p0 );
}
public void unLockAsset(java.lang.String p0) {
assetService.unLockAsset( p0 );
}
public void archiveAsset(java.lang.String p0) {
assetService.archiveAsset( p0 );
}
public void unArchiveAsset(java.lang.String p0) {
assetService.unArchiveAsset( p0 );
}
public void archiveAssets(java.lang.String[] p0,
boolean p1) {
assetService.archiveAssets( p0,
p1 );
}
public void removeAsset(java.lang.String p0) {
assetService.removeAsset( p0 );
}
public void removeAssets(java.lang.String[] p0) {
assetService.removeAssets( p0 );
}
public java.lang.String buildAssetSource(org.drools.guvnor.client.rpc.Asset p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.buildAssetSource( p0 );
}
public org.drools.guvnor.client.rpc.BuilderResult validateAsset(org.drools.guvnor.client.rpc.Asset p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.validateAsset( p0 );
}
public java.lang.String renameAsset(java.lang.String p0,
java.lang.String p1) {
return assetService.renameAsset( p0,
p1 );
}
public org.drools.guvnor.client.rpc.Asset loadRuleAsset(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.loadRuleAsset( p0 );
}
public org.drools.guvnor.client.rpc.Asset[] loadRuleAssets(java.lang.String[] p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.loadRuleAssets( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult loadAssetHistory(java.lang.String p0,
java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.loadAssetHistory( p0,
p1 );
}
public org.drools.guvnor.client.rpc.TableDataResult loadItemHistory(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.loadItemHistory( p0 );
}
public org.drools.guvnor.client.rpc.PageResponse loadArchivedAssets(org.drools.guvnor.client.rpc.PageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.loadArchivedAssets( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult loadArchivedAssets(int p0,
int p1) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.loadArchivedAssets( p0,
p1 );
}
public org.drools.guvnor.client.rpc.PageResponse findAssetPage(org.drools.guvnor.client.rpc.AssetPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.findAssetPage( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult listAssets(java.lang.String p0,
java.lang.String[] p1,
int p2,
int p3,
java.lang.String p4) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.listAssets( p0,
p1,
p2,
p3,
p4 );
}
public org.drools.guvnor.client.rpc.TableDataResult listAssetsWithPackageName(java.lang.String p0,
java.lang.String[] p1,
int p2,
int p3,
java.lang.String p4) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.listAssetsWithPackageName( p0,
p1,
p2,
p3,
p4 );
}
public java.lang.String copyAsset(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) {
return assetService.copyAsset( p0,
p1,
p2 );
}
public void promoteAssetToGlobalArea(java.lang.String p0) {
assetService.promoteAssetToGlobalArea( p0 );
}
public void changeAssetPackage(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) {
assetService.changeAssetPackage( p0,
p1,
p2 );
}
public void changeState(java.lang.String p0,
java.lang.String p1) {
assetService.changeState( p0,
p1 );
}
public void changePackageState(java.lang.String p0,
java.lang.String p1) {
assetService.changePackageState( p0,
p1 );
}
public java.util.List loadDiscussionForAsset(java.lang.String p0) {
return assetService.loadDiscussionForAsset( p0 );
}
public java.util.List addToDiscussionForAsset(java.lang.String p0,
java.lang.String p1) {
return assetService.addToDiscussionForAsset( p0,
p1 );
}
public void clearAllDiscussionsForAsset(java.lang.String p0) {
assetService.clearAllDiscussionsForAsset( p0 );
}
public long getAssetCount(org.drools.guvnor.client.rpc.AssetPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.findAssetPage( p0 ).getTotalRowSize();
}
/**
* PLACE THE FOLLOWING IN RepositoryServiceServlet.java *
*/
public org.drools.guvnor.client.rpc.PageResponse loadRuleListForState(org.drools.guvnor.client.rpc.StatePageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.loadRuleListForState( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult loadRuleListForState(java.lang.String p0,
int p1,
int p2,
java.lang.String p3) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.loadRuleListForState( p0,
p1,
p2,
p3 );
}
public org.drools.guvnor.client.rpc.TableConfig loadTableConfig(java.lang.String p0) {
return serviceImplementation.loadTableConfig( p0 );
}
public java.lang.String createNewRule(java.lang.String p0,
java.lang.String p1,
java.lang.String p2,
java.lang.String p3,
java.lang.String p4) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewRule( p0,
p1,
p2,
p3,
p4 );
}
public String createNewRule(org.drools.guvnor.client.rpc.NewAssetConfiguration p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewRule( p0 );
}
public String createNewRule(org.drools.guvnor.client.rpc.NewGuidedDecisionTableAssetConfiguration p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewRule( p0 );
}
public java.lang.String createNewImportedRule(java.lang.String p0,
java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createNewImportedRule( p0,
p1 );
}
public void deleteUncheckedRule(java.lang.String p0) {
serviceImplementation.deleteUncheckedRule( p0 );
}
public void clearRulesRepository() {
serviceImplementation.clearRulesRepository();
}
public java.lang.String[] listWorkspaces() {
return serviceImplementation.listWorkspaces();
}
public void createWorkspace(java.lang.String p0) {
serviceImplementation.createWorkspace( p0 );
}
public void removeWorkspace(java.lang.String p0) {
serviceImplementation.removeWorkspace( p0 );
}
public void updateWorkspace(java.lang.String p0,
java.lang.String[] p1,
java.lang.String[] p2) {
serviceImplementation.updateWorkspace( p0,
p1,
p2 );
}
public void updateDependency(java.lang.String p0,
java.lang.String p1) {
moduleService.updateDependency( p0,
p1 );
}
public java.lang.String[] getDependencies(java.lang.String p0) {
return moduleService.getDependencies( p0 );
}
public java.lang.String checkinVersion(org.drools.guvnor.client.rpc.Asset p0) throws com.google.gwt.user.client.rpc.SerializationException {
return assetService.checkinVersion( p0 );
}
public void restoreVersion(java.lang.String p0,
java.lang.String p1,
java.lang.String p2) {
assetService.restoreVersion( p0,
p1,
p2 );
}
public java.lang.String[] listStates() throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.listStates();
}
public java.lang.String createState(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.createState( p0 );
}
public void renameState(java.lang.String p0,
java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
serviceImplementation.renameState( p0,
p1 );
}
public void removeState(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
serviceImplementation.removeState( p0 );
}
public org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine loadSuggestionCompletionEngine(java.lang.String p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.loadSuggestionCompletionEngine( p0 );
}
public java.lang.String[] getCustomSelectors() throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.getCustomSelectors();
}
public org.drools.guvnor.client.rpc.PageResponse showLog(org.drools.guvnor.client.rpc.PageRequest p0) {
return serviceImplementation.showLog( p0 );
}
public org.drools.guvnor.client.rpc.LogEntry[] showLog() {
return serviceImplementation.showLog();
}
public void cleanLog() {
serviceImplementation.cleanLog();
}
public java.lang.String[] loadDropDownExpression(java.lang.String[] p0,
java.lang.String p1) {
return serviceImplementation.loadDropDownExpression( p0,
p1 );
}
public org.drools.guvnor.client.rpc.PageResponse queryFullText(org.drools.guvnor.client.rpc.QueryPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.queryFullText( p0 );
}
public org.drools.guvnor.client.rpc.PageResponse queryMetaData(org.drools.guvnor.client.rpc.QueryMetadataPageRequest p0) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.queryMetaData( p0 );
}
public org.drools.guvnor.client.rpc.TableDataResult queryMetaData(org.drools.guvnor.client.rpc.MetaDataQuery[] p0,
java.util.Date p1,
java.util.Date p2,
java.util.Date p3,
java.util.Date p4,
boolean p5,
int p6,
int p7) throws com.google.gwt.user.client.rpc.SerializationException {
return serviceImplementation.queryMetaData( p0,
p1,
p2,
p3,
p4,
p5,
p6,
p7 );
}
/** Generated GWT-RPC stub: lists user permissions (paged overload). */
public org.drools.guvnor.client.rpc.PageResponse listUserPermissions(org.drools.guvnor.client.rpc.PageRequest p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
    return serviceImplementation.listUserPermissions( p0 );
}
/** Generated GWT-RPC stub: lists all user permissions (unpaged overload). */
public java.util.Map listUserPermissions() throws org.drools.guvnor.client.rpc.DetailedSerializationException {
    return serviceImplementation.listUserPermissions();
}
/** Generated GWT-RPC stub: returns the permission map for a single user. */
public java.util.Map retrieveUserPermissions(java.lang.String p0) {
    return serviceImplementation.retrieveUserPermissions( p0 );
}
/** Generated GWT-RPC stub: replaces the permission map for a single user. */
public void updateUserPermissions(java.lang.String p0,
        java.util.Map p1) {
    serviceImplementation.updateUserPermissions( p0,
            p1 );
}
/** Generated GWT-RPC stub: lists all known permission type names. */
public java.lang.String[] listAvailablePermissionTypes() {
    return serviceImplementation.listAvailablePermissionTypes();
}
/** Generated GWT-RPC stub: lists all known permission role type names. */
public List<String> listAvailablePermissionRoleTypes() {
    return serviceImplementation.listAvailablePermissionRoleTypes();
}
/**
 * Decides whether installation of the sample repository content should be skipped.
 * It is skipped as soon as more than the single default module exists, or when the
 * backing service reports the "do not install sample" flag as set.
 */
public boolean isDoNotInstallSample() {
    Module[] installedModules = moduleService.listModules();
    if (installedModules.length != 1) {
        // Repository already has content beyond the default module.
        return true;
    }
    return serviceImplementation.isDoNotInstallSample();
}
/** Generated GWT-RPC stub: marks the sample content as not-to-be-installed. */
public void setDoNotInstallSample() {
    serviceImplementation.setDoNotInstallSample();
}
/** Generated GWT-RPC stub: deletes the given user. */
public void deleteUser(java.lang.String p0) {
    serviceImplementation.deleteUser( p0 );
}
/** Generated GWT-RPC stub: creates the given user. */
public void createUser(java.lang.String p0) {
    serviceImplementation.createUser( p0 );
}
/** Generated GWT-RPC stub: returns the current user's subscriptions. */
public java.util.List subscribe() {
    return serviceImplementation.subscribe();
}
/** Generated GWT-RPC stub: loads an inbox by name (legacy table-data overload). */
public org.drools.guvnor.client.rpc.TableDataResult loadInbox(java.lang.String p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
    return serviceImplementation.loadInbox( p0 );
}
/** Generated GWT-RPC stub: loads one page of an inbox (paged overload). */
public org.drools.guvnor.client.rpc.PageResponse loadInbox(org.drools.guvnor.client.rpc.InboxPageRequest p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
    return serviceImplementation.loadInbox( p0 );
}
/** Generated GWT-RPC stub: expands the named template with the supplied data map. */
public java.lang.String processTemplate(java.lang.String p0,
        java.util.Map p1) {
    return serviceImplementation.processTemplate( p0,
            p1 );
}
/** Generated GWT-RPC stub: loads Spring-context element data for the editor. */
public java.util.Map loadSpringContextElementData() throws org.drools.guvnor.client.rpc.DetailedSerializationException {
    return serviceImplementation.loadSpringContextElementData();
}
/** Generated GWT-RPC stub: loads work-item definition element data for the editor. */
public java.util.Map loadWorkitemDefinitionElementData() throws org.drools.guvnor.client.rpc.DetailedSerializationException {
    return serviceImplementation.loadWorkitemDefinitionElementData();
}
/** Generated GWT-RPC stub: loads the portable work-item definitions for a package. */
public Set<PortableWorkDefinition> loadWorkItemDefinitions(String p0) throws org.drools.guvnor.client.rpc.DetailedSerializationException {
    return serviceImplementation.loadWorkItemDefinitions( p0 );
}
/** Generated GWT-RPC stub: checks whether an asset with the given name exists in a module. */
public boolean doesAssetExistInModule(java.lang.String p0,
        java.lang.String p1) throws com.google.gwt.user.client.rpc.SerializationException {
    return serviceImplementation.doesAssetExistInModule( p0,
            p1 );
}
/** Delegates asset format conversion to the asset service (hand-written, unlike the generated stubs above). */
public ConversionResult convertAsset(String uuid,
        String targetFormat) throws SerializationException {
    return assetService.convertAsset( uuid,
            targetFormat );
}
}
| BZ724536: Show meaningful response to user when not logged in | guvnor-webapp-core/src/main/java/org/drools/guvnor/server/RepositoryServiceServlet.java | BZ724536: Show meaningful response to user when not logged in |
|
Java | apache-2.0 | f0cc51195e090df3a4557dc27051d9586b87d07c | 0 | protyposis/Spectaculum,protyposis/Spectaculum | /*
* Copyright 2014 Mario Guggenberger <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.protyposis.android.spectaculumdemo;
import android.app.Activity;
import android.graphics.BitmapFactory;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
import net.protyposis.android.spectaculum.SpectaculumView;
import net.protyposis.android.spectaculum.effects.Parameter;
import net.protyposis.android.spectaculum.effects.ImmersiveSensorNavigation;
import net.protyposis.android.spectaculum.effects.ContrastBrightnessAdjustmentEffect;
import net.protyposis.android.spectaculum.effects.EffectException;
import net.protyposis.android.spectaculum.effects.FlowAbsSubEffect;
import net.protyposis.android.spectaculum.effects.QrMarkerEffect;
import net.protyposis.android.spectaculum.effects.Effect;
import net.protyposis.android.spectaculum.effects.FlipEffect;
import net.protyposis.android.spectaculum.effects.FlowAbsEffect;
import net.protyposis.android.spectaculum.effects.KernelBlurEffect;
import net.protyposis.android.spectaculum.effects.KernelEdgeDetectEffect;
import net.protyposis.android.spectaculum.effects.KernelEmbossEffect;
import net.protyposis.android.spectaculum.effects.KernelGaussBlurEffect;
import net.protyposis.android.spectaculum.effects.KernelSharpenEffect;
import net.protyposis.android.spectaculum.effects.NoEffect;
import net.protyposis.android.spectaculum.effects.SimpleToonEffect;
import net.protyposis.android.spectaculum.effects.SobelEffect;
import net.protyposis.android.spectaculum.effects.ImmersiveTouchNavigation;
import net.protyposis.android.spectaculum.effects.StackEffect;
import net.protyposis.android.spectaculum.effects.WatermarkEffect;
import net.protyposis.android.spectaculum.gles.GLUtils;
import net.protyposis.android.spectaculumdemo.testeffect.ColorFilterEffect;
import net.protyposis.android.spectaculum.effects.ImmersiveEffect;
/**
* Created by Mario on 18.07.2014.
*
* Helper class for easy effect handling in the various Spectaculum views in this demo.
* Provides a list of effects for the actionbar and displays a parameter control panel for
* selected effects with parameters that the demo user can play with.
*/
public class EffectManager implements SpectaculumView.EffectEventListener, Effect.Listener {

    private Activity mActivity;
    private ViewGroup mParameterListView;
    private EffectParameterListAdapter mParameterListAdapter;
    private MenuItem mParameterToggleMenuItem;
    private SpectaculumView mSpectaculumView;
    private List<Effect> mEffects;
    private Effect mSelectedEffect;
    // Navigation helpers; only non-null while an ImmersiveEffect is selected.
    private ImmersiveSensorNavigation mImmersiveSensorNavigation;
    private ImmersiveTouchNavigation mImmersiveTouchNavigation;

    /**
     * Builds the list of demo effects and registers this manager as the view's
     * effect event listener.
     *
     * @param activity            host activity (used for toasts, resources and sensors)
     * @param parameterListViewId id of the ViewGroup that hosts the parameter control panel
     * @param glView              the Spectaculum view the effects are applied to
     */
    public EffectManager(Activity activity, int parameterListViewId, SpectaculumView glView) {
        mActivity = activity;
        mParameterListView = (ViewGroup) activity.findViewById(parameterListViewId);
        mSpectaculumView = glView;
        mEffects = new ArrayList<>();
        mSpectaculumView.setEffectEventListener(this);

        // Spectaculum-Core filters
        mEffects.add(new NoEffect());
        mEffects.add(new FlipEffect());
        mEffects.add(new SobelEffect());
        mEffects.add(new SimpleToonEffect());
        mEffects.add(new KernelBlurEffect());
        mEffects.add(new KernelGaussBlurEffect());
        mEffects.add(new KernelEdgeDetectEffect());
        mEffects.add(new KernelEmbossEffect());
        mEffects.add(new KernelSharpenEffect());
        mEffects.add(new ContrastBrightnessAdjustmentEffect());
        WatermarkEffect watermarkEffect = new WatermarkEffect();
        watermarkEffect.setWatermark(BitmapFactory.decodeResource(mActivity.getResources(), R.mipmap.ic_launcher));
        mEffects.add(watermarkEffect);

        // Create a filter stack with multiple effects
        mEffects.add(new StackEffect("Stack: Toon, Contrast/Brightness, Watermark",
                new SimpleToonEffect(), new ContrastBrightnessAdjustmentEffect(), watermarkEffect));

        // custom filters
        mEffects.add(new ColorFilterEffect());

        // Immersive filters
        mEffects.add(new ImmersiveEffect());

        // FlowAbs filters
        FlowAbsEffect flowAbsEffect = new FlowAbsEffect();
        mEffects.add(flowAbsEffect);
        mEffects.add(flowAbsEffect.getNoiseTextureEffect());
        mEffects.add(flowAbsEffect.getGaussEffect());
        mEffects.add(flowAbsEffect.getSmoothEffect());
        mEffects.add(flowAbsEffect.getBilateralFilterEffect());
        mEffects.add(flowAbsEffect.getColorQuantizationEffect());
        mEffects.add(flowAbsEffect.getDOGEffect());
        mEffects.add(flowAbsEffect.getFDOGEffect());
        mEffects.add(flowAbsEffect.getTangentFlowMapEffect());

        // QrMarker filters
        QrMarkerEffect qrMarkerEffect = new QrMarkerEffect();
        //mEffects.add(qrMarkerEffect);
        mEffects.add(qrMarkerEffect.getCannyEdgeEffect());
    }

    /** Registers all managed effects with the Spectaculum view. */
    public void addEffects() {
        mSpectaculumView.addEffect(mEffects.toArray(new Effect[mEffects.size()]));
    }

    /** Returns the display names of all managed effects, in list order. */
    public List<String> getEffectNames() {
        List<String> effectNames = new ArrayList<>();
        for(Effect effect : mEffects) {
            effectNames.add(effect.getName());
        }
        return effectNames;
    }

    /**
     * Selects the effect at the given index, tearing down state that belongs to the
     * previously selected effect (its listener and any immersive navigation helpers).
     *
     * @return true if the effect was selected, false if it is unsupported on this device
     */
    public boolean selectEffect(int index) {
        if(mSelectedEffect != null) {
            // Remove listener from previously selected effect
            mSelectedEffect.setListener(null);
            if (mSelectedEffect instanceof ImmersiveEffect) {
                if (mImmersiveSensorNavigation != null) {
                    mImmersiveSensorNavigation.deactivate();
                    mImmersiveSensorNavigation.detach();
                    mImmersiveSensorNavigation = null;
                }
                if (mImmersiveTouchNavigation != null) {
                    mImmersiveTouchNavigation.deactivate();
                    mImmersiveTouchNavigation.detach();
                    mImmersiveTouchNavigation = null;
                }
            }
        }
        Effect effect = mEffects.get(index);
        if(effect instanceof FlowAbsEffect || effect instanceof FlowAbsSubEffect) {
            // FlowAbs is known to be broken or unsupported on some GPUs; warn or refuse.
            if(GLUtils.HAS_GPU_TEGRA) {
                Toast.makeText(mActivity, "FlowAbs deactivated (the Tegra GPU of this device does not support the required dynamic loops in shaders)", Toast.LENGTH_SHORT).show();
                return false;
            } else if(!GLUtils.HAS_FLOAT_FRAMEBUFFER_SUPPORT) {
                Toast.makeText(mActivity, "FlowAbs effects do not render correctly on this device (GPU does not support fp framebuffer attachments)", Toast.LENGTH_SHORT).show();
            }
        }
        mSelectedEffect = effect;
        mSpectaculumView.selectEffect(index);
        return true;
    }

    /** Returns the currently selected effect, or null if none has been selected yet. */
    public Effect getSelectedEffect() {
        return mSelectedEffect;
    }

    /** Populates the actionbar submenu with one entry per managed effect. */
    public void addToMenu(Menu menu) {
        SubMenu submenu = menu.findItem(R.id.action_list_effects).getSubMenu();
        int count = 0;
        for(String effectName : getEffectNames()) {
            // Item id == effect index, which doMenuActionEffect relies on.
            submenu.add(R.id.action_list_effects, count++, Menu.NONE, effectName);
        }
        mParameterToggleMenuItem = menu.findItem(R.id.action_toggle_parameters);
    }

    /** Handles effect-selection menu items; returns true if the item was consumed. */
    private boolean doMenuActionEffect(MenuItem item) {
        if(item.getGroupId() == R.id.action_list_effects) {
            return selectEffect(item.getItemId());
        }
        return false;
    }

    /** Dispatches actionbar menu items (effect selection and parameter panel toggle). */
    public boolean doMenuActions(MenuItem item) {
        if(doMenuActionEffect(item)) {
            return true;
        } else if(item.getItemId() == R.id.action_toggle_parameters) {
            mParameterListView.setVisibility(mParameterListView.getVisibility() == View.VISIBLE ? View.GONE : View.VISIBLE);
            return true;
        }
        return false;
    }

    /**
     * Rebuilds the parameter control panel for the given effect, or hides it when the
     * effect exposes no parameters.
     */
    public void viewEffectParameters(Effect effect) {
        if(effect.hasParameters()) {
            mParameterListAdapter = new EffectParameterListAdapter(mActivity, mSpectaculumView, effect.getParameters());
            mParameterListView.removeAllViews();
            for(int i = 0; i < mParameterListAdapter.getCount(); i++) {
                mParameterListView.addView(mParameterListAdapter.getView(i, null, null));
            }
            mParameterListView.setVisibility(View.VISIBLE);
            mParameterToggleMenuItem.setEnabled(true);
        } else {
            mParameterListView.setVisibility(View.GONE);
            mParameterListView.removeAllViews();
            if(mParameterListAdapter != null) {
                mParameterListAdapter.clear();
                mParameterListAdapter = null;
            }
            mParameterToggleMenuItem.setEnabled(false);
        }
    }

    /** Lifecycle hook: suspends immersive navigation while the activity is paused. */
    public void onPause() {
        if(mImmersiveSensorNavigation != null) {
            mImmersiveSensorNavigation.deactivate();
        }
        if(mImmersiveTouchNavigation != null) {
            mImmersiveTouchNavigation.deactivate();
        }
    }

    /** Lifecycle hook: resumes immersive navigation when the activity comes back. */
    public void onResume() {
        if(mImmersiveSensorNavigation != null) {
            mImmersiveSensorNavigation.activate();
        }
        if(mImmersiveTouchNavigation != null) {
            mImmersiveTouchNavigation.activate();
        }
    }

    @Override
    public void onEffectInitialized(int index, final Effect effect) {
        // nothing to do here
    }

    @Override
    public void onEffectSelected(int index, Effect effect) {
        effect.setListener(this); // add listener so callbacks below get called
        viewEffectParameters(getSelectedEffect());
        if(effect instanceof ImmersiveEffect) {
            if(mImmersiveSensorNavigation == null) {
                // Create sensor navigation instance in a try/catch block because it fails
                // if no rotation sensor is available.
                try {
                    mImmersiveSensorNavigation = new ImmersiveSensorNavigation(mActivity);
                    mImmersiveSensorNavigation.attachTo((ImmersiveEffect) effect);
                } catch (RuntimeException e) {
                    Toast.makeText(mActivity, e.getMessage(), Toast.LENGTH_LONG).show();
                }
            }
            if(mImmersiveTouchNavigation == null) {
                mImmersiveTouchNavigation = new ImmersiveTouchNavigation(mSpectaculumView);
                mImmersiveTouchNavigation.attachTo((ImmersiveEffect) effect);
            }
        }
    }

    @Override
    public void onEffectError(int index, final Effect effect, final EffectException e) {
        if(e.getCause() != null) {
            Throwable cause = e.getCause();
            Toast.makeText(mActivity, "Effect " + cause.getClass().getSimpleName() + ": " + cause.getMessage(), Toast.LENGTH_SHORT).show();
        } else {
            Toast.makeText(mActivity, "EffectException: " + e.getMessage(), Toast.LENGTH_SHORT).show();
        }
        selectEffect(0); // select the NoEffect to get rid of the parameter control panel of the failed effect
    }

    @Override
    public void onEffectChanged(Effect effect) {
        /*
         * Because we set this class as the event listener for effects, we overwrite SpectaculumView's
         * internal event listener and must forward the calls to the view. This is true for all following
         * event listener methods.
         * TODO find a solution, maybe permit a list of event listeners on effects? what would be the performance implication?
         */
        mSpectaculumView.onEffectChanged(effect);
    }

    @Override
    public void onParameterAdded(Effect effect, Parameter parameter) {
        mSpectaculumView.onParameterAdded(effect, parameter); // see onEffectChanged
        // refresh the parameter control panel
        viewEffectParameters(getSelectedEffect());
    }

    @Override
    public void onParameterRemoved(Effect effect, Parameter parameter) {
        // FIX: this previously forwarded to onParameterAdded (copy-paste slip), so the view
        // never learned about removals. Forward the removal event instead (see onEffectChanged).
        mSpectaculumView.onParameterRemoved(effect, parameter);
        // refresh the parameter control panel
        viewEffectParameters(getSelectedEffect());
    }
}
| Spectaculum-Demo/src/main/java/net/protyposis/android/spectaculumdemo/EffectManager.java | /*
* Copyright 2014 Mario Guggenberger <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.protyposis.android.spectaculumdemo;
import android.app.Activity;
import android.graphics.BitmapFactory;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
import net.protyposis.android.spectaculum.SpectaculumView;
import net.protyposis.android.spectaculum.effects.Parameter;
import net.protyposis.android.spectaculum.effects.ImmersiveSensorNavigation;
import net.protyposis.android.spectaculum.effects.ContrastBrightnessAdjustmentEffect;
import net.protyposis.android.spectaculum.effects.EffectException;
import net.protyposis.android.spectaculum.effects.FlowAbsSubEffect;
import net.protyposis.android.spectaculum.effects.QrMarkerEffect;
import net.protyposis.android.spectaculum.effects.Effect;
import net.protyposis.android.spectaculum.effects.FlipEffect;
import net.protyposis.android.spectaculum.effects.FlowAbsEffect;
import net.protyposis.android.spectaculum.effects.KernelBlurEffect;
import net.protyposis.android.spectaculum.effects.KernelEdgeDetectEffect;
import net.protyposis.android.spectaculum.effects.KernelEmbossEffect;
import net.protyposis.android.spectaculum.effects.KernelGaussBlurEffect;
import net.protyposis.android.spectaculum.effects.KernelSharpenEffect;
import net.protyposis.android.spectaculum.effects.NoEffect;
import net.protyposis.android.spectaculum.effects.SimpleToonEffect;
import net.protyposis.android.spectaculum.effects.SobelEffect;
import net.protyposis.android.spectaculum.effects.ImmersiveTouchNavigation;
import net.protyposis.android.spectaculum.effects.WatermarkEffect;
import net.protyposis.android.spectaculum.gles.GLUtils;
import net.protyposis.android.spectaculumdemo.testeffect.ColorFilterEffect;
import net.protyposis.android.spectaculum.effects.ImmersiveEffect;
/**
* Created by Mario on 18.07.2014.
*
* Helper class for easy effect handling in the various Spectaculum views in this demo.
* Provides a list of effects for the actionbar and displays a parameter control panel for
* selected effects with parameters that the demo user can play with.
*/
public class EffectManager implements SpectaculumView.EffectEventListener, Effect.Listener {

    private Activity mActivity;
    private ViewGroup mParameterListView;
    private EffectParameterListAdapter mParameterListAdapter;
    private MenuItem mParameterToggleMenuItem;
    private SpectaculumView mSpectaculumView;
    private List<Effect> mEffects;
    private Effect mSelectedEffect;
    // Navigation helpers; only non-null while an ImmersiveEffect is selected.
    private ImmersiveSensorNavigation mImmersiveSensorNavigation;
    private ImmersiveTouchNavigation mImmersiveTouchNavigation;

    /**
     * Builds the list of demo effects and registers this manager as the view's
     * effect event listener.
     *
     * @param activity            host activity (used for toasts, resources and sensors)
     * @param parameterListViewId id of the ViewGroup that hosts the parameter control panel
     * @param glView              the Spectaculum view the effects are applied to
     */
    public EffectManager(Activity activity, int parameterListViewId, SpectaculumView glView) {
        mActivity = activity;
        mParameterListView = (ViewGroup) activity.findViewById(parameterListViewId);
        mSpectaculumView = glView;
        mEffects = new ArrayList<>();
        mSpectaculumView.setEffectEventListener(this);

        // MediaPlayer-GLES filters
        mEffects.add(new NoEffect());
        mEffects.add(new FlipEffect());
        mEffects.add(new SobelEffect());
        mEffects.add(new SimpleToonEffect());
        mEffects.add(new KernelBlurEffect());
        mEffects.add(new KernelGaussBlurEffect());
        mEffects.add(new KernelEdgeDetectEffect());
        mEffects.add(new KernelEmbossEffect());
        mEffects.add(new KernelSharpenEffect());
        mEffects.add(new ContrastBrightnessAdjustmentEffect());
        WatermarkEffect watermarkEffect = new WatermarkEffect();
        watermarkEffect.setWatermark(BitmapFactory.decodeResource(mActivity.getResources(), R.mipmap.ic_launcher));
        mEffects.add(watermarkEffect);

        // custom filters
        mEffects.add(new ColorFilterEffect());

        // Immersive filters
        mEffects.add(new ImmersiveEffect());

        // FlowAbs filters
        FlowAbsEffect flowAbsEffect = new FlowAbsEffect();
        mEffects.add(flowAbsEffect);
        mEffects.add(flowAbsEffect.getNoiseTextureEffect());
        mEffects.add(flowAbsEffect.getGaussEffect());
        mEffects.add(flowAbsEffect.getSmoothEffect());
        mEffects.add(flowAbsEffect.getBilateralFilterEffect());
        mEffects.add(flowAbsEffect.getColorQuantizationEffect());
        mEffects.add(flowAbsEffect.getDOGEffect());
        mEffects.add(flowAbsEffect.getFDOGEffect());
        mEffects.add(flowAbsEffect.getTangentFlowMapEffect());

        // QrMarker filters
        QrMarkerEffect qrMarkerEffect = new QrMarkerEffect();
        //mEffects.add(qrMarkerEffect);
        mEffects.add(qrMarkerEffect.getCannyEdgeEffect());
    }

    /** Registers all managed effects with the Spectaculum view. */
    public void addEffects() {
        mSpectaculumView.addEffect(mEffects.toArray(new Effect[mEffects.size()]));
    }

    /** Returns the display names of all managed effects, in list order. */
    public List<String> getEffectNames() {
        List<String> effectNames = new ArrayList<>();
        for(Effect effect : mEffects) {
            effectNames.add(effect.getName());
        }
        return effectNames;
    }

    /**
     * Selects the effect at the given index, tearing down state that belongs to the
     * previously selected effect (its listener and any immersive navigation helpers).
     *
     * @return true if the effect was selected, false if it is unsupported on this device
     */
    public boolean selectEffect(int index) {
        if(mSelectedEffect != null) {
            // Remove listener from previously selected effect
            mSelectedEffect.setListener(null);
            if (mSelectedEffect instanceof ImmersiveEffect) {
                if (mImmersiveSensorNavigation != null) {
                    mImmersiveSensorNavigation.deactivate();
                    mImmersiveSensorNavigation.detach();
                    mImmersiveSensorNavigation = null;
                }
                if (mImmersiveTouchNavigation != null) {
                    mImmersiveTouchNavigation.deactivate();
                    mImmersiveTouchNavigation.detach();
                    mImmersiveTouchNavigation = null;
                }
            }
        }
        Effect effect = mEffects.get(index);
        if(effect instanceof FlowAbsEffect || effect instanceof FlowAbsSubEffect) {
            // FlowAbs is known to be broken or unsupported on some GPUs; warn or refuse.
            if(GLUtils.HAS_GPU_TEGRA) {
                Toast.makeText(mActivity, "FlowAbs deactivated (the Tegra GPU of this device does not support the required dynamic loops in shaders)", Toast.LENGTH_SHORT).show();
                return false;
            } else if(!GLUtils.HAS_FLOAT_FRAMEBUFFER_SUPPORT) {
                Toast.makeText(mActivity, "FlowAbs effects do not render correctly on this device (GPU does not support fp framebuffer attachments)", Toast.LENGTH_SHORT).show();
            }
        }
        mSelectedEffect = effect;
        mSpectaculumView.selectEffect(index);
        return true;
    }

    /** Returns the currently selected effect, or null if none has been selected yet. */
    public Effect getSelectedEffect() {
        return mSelectedEffect;
    }

    /** Populates the actionbar submenu with one entry per managed effect. */
    public void addToMenu(Menu menu) {
        SubMenu submenu = menu.findItem(R.id.action_list_effects).getSubMenu();
        int count = 0;
        for(String effectName : getEffectNames()) {
            // Item id == effect index, which doMenuActionEffect relies on.
            submenu.add(R.id.action_list_effects, count++, Menu.NONE, effectName);
        }
        mParameterToggleMenuItem = menu.findItem(R.id.action_toggle_parameters);
    }

    /** Handles effect-selection menu items; returns true if the item was consumed. */
    private boolean doMenuActionEffect(MenuItem item) {
        if(item.getGroupId() == R.id.action_list_effects) {
            return selectEffect(item.getItemId());
        }
        return false;
    }

    /** Dispatches actionbar menu items (effect selection and parameter panel toggle). */
    public boolean doMenuActions(MenuItem item) {
        if(doMenuActionEffect(item)) {
            return true;
        } else if(item.getItemId() == R.id.action_toggle_parameters) {
            mParameterListView.setVisibility(mParameterListView.getVisibility() == View.VISIBLE ? View.GONE : View.VISIBLE);
            return true;
        }
        return false;
    }

    /**
     * Rebuilds the parameter control panel for the given effect, or hides it when the
     * effect exposes no parameters.
     */
    public void viewEffectParameters(Effect effect) {
        if(effect.hasParameters()) {
            mParameterListAdapter = new EffectParameterListAdapter(mActivity, mSpectaculumView, effect.getParameters());
            mParameterListView.removeAllViews();
            for(int i = 0; i < mParameterListAdapter.getCount(); i++) {
                mParameterListView.addView(mParameterListAdapter.getView(i, null, null));
            }
            mParameterListView.setVisibility(View.VISIBLE);
            mParameterToggleMenuItem.setEnabled(true);
        } else {
            mParameterListView.setVisibility(View.GONE);
            mParameterListView.removeAllViews();
            if(mParameterListAdapter != null) {
                mParameterListAdapter.clear();
                mParameterListAdapter = null;
            }
            mParameterToggleMenuItem.setEnabled(false);
        }
    }

    /** Lifecycle hook: suspends immersive navigation while the activity is paused. */
    public void onPause() {
        if(mImmersiveSensorNavigation != null) {
            mImmersiveSensorNavigation.deactivate();
        }
        if(mImmersiveTouchNavigation != null) {
            mImmersiveTouchNavigation.deactivate();
        }
    }

    /** Lifecycle hook: resumes immersive navigation when the activity comes back. */
    public void onResume() {
        if(mImmersiveSensorNavigation != null) {
            mImmersiveSensorNavigation.activate();
        }
        if(mImmersiveTouchNavigation != null) {
            mImmersiveTouchNavigation.activate();
        }
    }

    @Override
    public void onEffectInitialized(int index, final Effect effect) {
        // nothing to do here
    }

    @Override
    public void onEffectSelected(int index, Effect effect) {
        effect.setListener(this); // add listener so callbacks below get called
        viewEffectParameters(getSelectedEffect());
        if(effect instanceof ImmersiveEffect) {
            if(mImmersiveSensorNavigation == null) {
                // Create sensor navigation instance in a try/catch block because it fails
                // if no rotation sensor is available.
                try {
                    mImmersiveSensorNavigation = new ImmersiveSensorNavigation(mActivity);
                    mImmersiveSensorNavigation.attachTo((ImmersiveEffect) effect);
                } catch (RuntimeException e) {
                    Toast.makeText(mActivity, e.getMessage(), Toast.LENGTH_LONG).show();
                }
            }
            if(mImmersiveTouchNavigation == null) {
                mImmersiveTouchNavigation = new ImmersiveTouchNavigation(mSpectaculumView);
                mImmersiveTouchNavigation.attachTo((ImmersiveEffect) effect);
            }
        }
    }

    @Override
    public void onEffectError(int index, final Effect effect, final EffectException e) {
        if(e.getCause() != null) {
            Throwable cause = e.getCause();
            Toast.makeText(mActivity, "Effect " + cause.getClass().getSimpleName() + ": " + cause.getMessage(), Toast.LENGTH_SHORT).show();
        } else {
            Toast.makeText(mActivity, "EffectException: " + e.getMessage(), Toast.LENGTH_SHORT).show();
        }
        selectEffect(0); // select the NoEffect to get rid of the parameter control panel of the failed effect
    }

    @Override
    public void onEffectChanged(Effect effect) {
        /*
         * Because we set this class as the event listener for effects, we overwrite SpectaculumView's
         * internal event listener and must forward the calls to the view. This is true for all following
         * event listener methods.
         * TODO find a solution, maybe permit a list of event listeners on effects? what would be the performance implication?
         */
        mSpectaculumView.onEffectChanged(effect);
    }

    @Override
    public void onParameterAdded(Effect effect, Parameter parameter) {
        mSpectaculumView.onParameterAdded(effect, parameter); // see onEffectChanged
        // refresh the parameter control panel
        viewEffectParameters(getSelectedEffect());
    }

    @Override
    public void onParameterRemoved(Effect effect, Parameter parameter) {
        // FIX: this previously forwarded to onParameterAdded (copy-paste slip), so the view
        // never learned about removals. Forward the removal event instead (see onEffectChanged).
        mSpectaculumView.onParameterRemoved(effect, parameter);
        // refresh the parameter control panel
        viewEffectParameters(getSelectedEffect());
    }
}
| Add stack effect to demo app
| Spectaculum-Demo/src/main/java/net/protyposis/android/spectaculumdemo/EffectManager.java | Add stack effect to demo app |
|
Java | apache-2.0 | eac5be6eb075e2fab8c0c703a320daaaef850c87 | 0 | werkt/bazel,dslomov/bazel-windows,meteorcloudy/bazel,dslomov/bazel-windows,ulfjack/bazel,davidzchen/bazel,meteorcloudy/bazel,dslomov/bazel-windows,werkt/bazel,ulfjack/bazel,dslomov/bazel,dslomov/bazel,davidzchen/bazel,ButterflyNetwork/bazel,safarmer/bazel,dropbox/bazel,ButterflyNetwork/bazel,dslomov/bazel,davidzchen/bazel,aehlig/bazel,werkt/bazel,davidzchen/bazel,cushon/bazel,twitter-forks/bazel,katre/bazel,safarmer/bazel,werkt/bazel,davidzchen/bazel,aehlig/bazel,ButterflyNetwork/bazel,ButterflyNetwork/bazel,twitter-forks/bazel,katre/bazel,katre/bazel,meteorcloudy/bazel,ulfjack/bazel,perezd/bazel,dropbox/bazel,safarmer/bazel,cushon/bazel,perezd/bazel,davidzchen/bazel,aehlig/bazel,perezd/bazel,aehlig/bazel,akira-baruah/bazel,dslomov/bazel-windows,ulfjack/bazel,twitter-forks/bazel,davidzchen/bazel,dropbox/bazel,katre/bazel,twitter-forks/bazel,twitter-forks/bazel,safarmer/bazel,akira-baruah/bazel,bazelbuild/bazel,twitter-forks/bazel,twitter-forks/bazel,bazelbuild/bazel,meteorcloudy/bazel,ulfjack/bazel,katre/bazel,cushon/bazel,safarmer/bazel,ulfjack/bazel,dropbox/bazel,akira-baruah/bazel,perezd/bazel,dslomov/bazel,werkt/bazel,dropbox/bazel,cushon/bazel,akira-baruah/bazel,bazelbuild/bazel,ButterflyNetwork/bazel,bazelbuild/bazel,dslomov/bazel,akira-baruah/bazel,cushon/bazel,ulfjack/bazel,safarmer/bazel,bazelbuild/bazel,dslomov/bazel-windows,meteorcloudy/bazel,dropbox/bazel,katre/bazel,aehlig/bazel,akira-baruah/bazel,ButterflyNetwork/bazel,perezd/bazel,werkt/bazel,meteorcloudy/bazel,dslomov/bazel,dslomov/bazel-windows,perezd/bazel,aehlig/bazel,cushon/bazel,bazelbuild/bazel,aehlig/bazel,dslomov/bazel,meteorcloudy/bazel,perezd/bazel | // Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.skylark.SkylarkRuleContext;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.packages.NativeInfo;
import com.google.devtools.build.lib.packages.NativeProvider;
import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException;
import com.google.devtools.build.lib.rules.android.AndroidConfiguration.AndroidAaptVersion;
import com.google.devtools.build.lib.rules.android.AndroidLibraryAarInfo.Aar;
import com.google.devtools.build.lib.rules.java.JavaCompilationInfoProvider;
import com.google.devtools.build.lib.rules.java.JavaInfo;
import com.google.devtools.build.lib.skylarkinterface.Param;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.Runtime;
import com.google.devtools.build.lib.syntax.SkylarkDict;
import com.google.devtools.build.lib.syntax.SkylarkList;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.List;
import java.util.Objects;
import javax.annotation.Nullable;
/** Skylark-visible methods for working with Android data (manifests, resources, and assets). */
@SkylarkModule(
name = "android_data",
doc =
"Utilities for working with Android data (manifests, resources, and assets). "
+ "This API is non-final and subject to change without warning; do not rely on it.")
public class AndroidSkylarkData {
/**
 * Skylark API for getting an asset provider for android_library targets that don't specify
 * assets of their own.
 *
 * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
 *
 * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
 *     instead. See b/79159379
 */
@SkylarkCallable(
    name = "assets_from_deps",
    mandatoryPositionals = 1, // context
    parameters = {
      @Param(
          name = "deps",
          defaultValue = "[]",
          type = SkylarkList.class,
          generic1 = AndroidAssetsInfo.class,
          positional = false,
          named = true,
          doc = "Dependencies to inherit assets from"),
      @Param(
          name = "neverlink",
          defaultValue = "False",
          type = Boolean.class,
          positional = false,
          named = true,
          doc =
              "Defaults to False. If true, assets will not be exposed to targets that depend on"
                  + " them.")
    },
    doc =
        "Creates an AndroidAssetsInfo from this target's asset dependencies, ignoring local"
            + " assets. No processing will be done. This method is deprecated and exposed only"
            + " for backwards-compatibility with existing Native behavior.")
public static AndroidAssetsInfo assetsFromDeps(
    SkylarkRuleContext ctx, SkylarkList<AndroidAssetsInfo> deps, boolean neverlink)
    throws EvalException {
  // Merge the dependencies' assets (no local processing) and expose them under
  // this target's label.
  AssetDependencies mergedDeps = AssetDependencies.fromProviders(deps, neverlink);
  return mergedDeps.toInfo(ctx.getLabel());
}
/**
 * Skylark API for getting a resource provider for android_library targets that don't specify
 * resources.
 *
 * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
 *
 * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
 *     instead. See b/79159379
 */
@SkylarkCallable(
    name = "resources_from_deps",
    mandatoryPositionals = 1, // context
    parameters = {
      @Param(
          name = "deps",
          defaultValue = "[]",
          type = SkylarkList.class,
          generic1 = AndroidResourcesInfo.class,
          positional = false,
          named = true,
          doc = "Dependencies to inherit resources from"),
      @Param(
          name = "neverlink",
          defaultValue = "False",
          type = Boolean.class,
          positional = false,
          named = true,
          doc =
              "Defaults to False. If true, resources will not be exposed to targets that depend"
                  + " on them."),
      @Param(
          name = "custom_package",
          positional = false,
          defaultValue = "None",
          type = String.class,
          noneable = true,
          named = true,
          doc =
              "The Android application package to stamp the manifest with. If not provided, the"
                  + " current Java package, derived from the location of this target's BUILD"
                  + " file, will be used. For example, given a BUILD file in"
                  + " 'java/com/foo/bar/BUILD', the package would be 'com.foo.bar'."),
    },
    doc =
        "Creates an AndroidResourcesInfo from this target's resource dependencies, ignoring local"
            + " resources. Only processing of deps will be done. This method is deprecated and"
            + " exposed only for backwards-compatibility with existing Native behavior. An empty"
            + " manifest will be generated and included in the provider - this path should not"
            + " be used when an explicit manifest is specified.")
public static AndroidResourcesInfo resourcesFromDeps(
    SkylarkRuleContext ctx,
    SkylarkList<AndroidResourcesInfo> deps,
    boolean neverlink,
    Object customPackage)
    throws EvalException, InterruptedException {
  // custom_package may be Skylark None; fromNoneable maps that to null.
  String pkg = fromNoneable(customPackage, String.class);
  if (pkg == null) {
    // Fall back to the Java package derived from this target's BUILD file location.
    pkg = AndroidManifest.getDefaultPackage(ctx.getRuleContext());
  }
  // Process only the transitive library data (deps + empty assets) against a
  // generated empty, non-exported manifest, and expose the resulting resource
  // info under this target's label.
  return ResourceApk.processFromTransitiveLibraryData(
          ctx.getRuleContext(),
          ResourceDependencies.fromProviders(deps, /* neverlink = */ neverlink),
          AssetDependencies.empty(),
          StampedAndroidManifest.createEmpty(ctx.getRuleContext(), pkg, /* exported = */ false))
      .toResourceInfo(ctx.getLabel());
}
/**
* Skylark API for stamping an Android manifest
*
* <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
*
* @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
* instead. See b/79159379
*/
@SkylarkCallable(
name = "stamp_manifest",
mandatoryPositionals = 1, // SkylarkRuleContext ctx is mandatory
parameters = {
@Param(
name = "manifest",
positional = false,
defaultValue = "None",
type = Artifact.class,
noneable = true,
named = true,
doc = "The manifest to stamp. If not passed, a dummy manifest will be generated"),
@Param(
name = "custom_package",
positional = false,
defaultValue = "None",
type = String.class,
noneable = true,
named = true,
doc =
"The Android application package to stamp the manifest with. If not provided, the"
+ " current Java package, derived from the location of this target's BUILD"
+ " file, will be used. For example, given a BUILD file in"
+ " 'java/com/foo/bar/BUILD', the package would be 'com.foo.bar'."),
@Param(
name = "exports_manifest",
positional = false,
defaultValue = "False",
type = Boolean.class,
named = true,
doc =
"Defaults to False. If passed as True, this manifest will be exported to and"
+ " eventually merged into targets that depend on it. Otherwise, it won't be"
+ " inherited."),
},
doc = "Stamps a manifest with package information.")
public AndroidManifestInfo stampAndroidManifest(
SkylarkRuleContext ctx, Object manifest, Object customPackage, boolean exported) {
String pkg = fromNoneable(customPackage, String.class);
if (pkg == null) {
pkg = AndroidManifest.getDefaultPackage(ctx.getRuleContext());
}
Artifact primaryManifest = fromNoneable(manifest, Artifact.class);
if (primaryManifest == null) {
return StampedAndroidManifest.createEmpty(ctx.getRuleContext(), pkg, exported).toProvider();
}
return new AndroidManifest(primaryManifest, pkg, exported)
.stamp(ctx.getRuleContext())
.toProvider();
}
/**
* Skylark API for merging android_library assets
*
* <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
*
* @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
* instead. See b/79159379
*/
@SkylarkCallable(
name = "merge_assets",
mandatoryPositionals = 1, // context
parameters = {
@Param(
name = "assets",
positional = false,
defaultValue = "None",
type = SkylarkList.class,
generic1 = ConfiguredTarget.class,
noneable = true,
named = true,
doc =
"Targets containing raw assets for this target. If passed, 'assets_dir' must also"
+ " be passed."),
@Param(
name = "assets_dir",
positional = false,
defaultValue = "None",
type = String.class,
noneable = true,
named = true,
doc =
"Directory the assets are contained in. Must be passed if and only if 'assets' is"
+ " passed. This path will be split off of the asset paths on the device."),
@Param(
name = "deps",
positional = false,
defaultValue = "[]",
type = SkylarkList.class,
generic1 = AndroidAssetsInfo.class,
named = true,
doc =
"Providers containing assets from dependencies. These assets will be merged"
+ " together with each other and this target's assets."),
@Param(
name = "neverlink",
positional = false,
defaultValue = "False",
type = Boolean.class,
named = true,
doc =
"Defaults to False. If passed as True, these assets will not be inherited by"
+ " targets that depend on this one.")
},
doc =
"Merges this target's assets together with assets inherited from dependencies. Note that,"
+ " by default, actions for validating the merge are created but may not be called."
+ " You may want to force these actions to be called - see the 'validation_result'"
+ " field in AndroidAssetsInfo")
public AndroidAssetsInfo mergeAssets(
SkylarkRuleContext ctx,
Object assets,
Object assetsDir,
SkylarkList<AndroidAssetsInfo> deps,
boolean neverlink)
throws EvalException, InterruptedException {
try {
return AndroidAssets.from(
ctx.getRuleContext(),
listFromNoneable(assets, ConfiguredTarget.class),
isNone(assetsDir) ? null : PathFragment.create(fromNoneable(assetsDir, String.class)))
.parse(ctx.getRuleContext())
.merge(
ctx.getRuleContext(),
AssetDependencies.fromProviders(deps.getImmutableList(), neverlink))
.toProvider();
} catch (RuleErrorException e) {
throw new EvalException(Location.BUILTIN, e);
}
}
  /**
   * Skylark API for merging android_library resources
   *
   * <p>Parses this target's resources, merges them with resource providers from {@code deps}, and
   * validates the result against the stamped manifest. Returns both an AndroidResourcesInfo and a
   * JavaInfo wrapping the generated R class jar.
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "merge_resources",
      mandatoryPositionals = 2, // context, manifest
      parameters = {
        @Param(
            name = "resources",
            positional = false,
            defaultValue = "[]",
            type = SkylarkList.class,
            generic1 = FileProvider.class,
            named = true,
            doc = "Providers of this target's resources"),
        @Param(
            name = "deps",
            positional = false,
            defaultValue = "[]",
            type = SkylarkList.class,
            generic1 = AndroidResourcesInfo.class,
            named = true,
            doc =
                "Targets containing raw resources from dependencies. These resources will be merged"
                    + " together with each other and this target's resources."),
        @Param(
            name = "neverlink",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If passed as True, these resources will not be inherited by"
                    + " targets that depend on this one."),
        @Param(
            name = "enable_data_binding",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If True, processes data binding expressions in layout"
                    + " resources."),
      },
      doc =
          "Merges this target's resources together with resources inherited from dependencies."
              + " Returns a dict of provider type to actual info, with elements for"
              + " AndroidResourcesInfo (various resource information) and JavaInfo (wrapping the"
              + " R.class jar, for use in Java compilation). The passed manifest provider is used"
              + " to get Android package information and to validate that all resources it refers"
              + " to are available. Note that this method might do additional processing to this"
              + " manifest, so in the future, you may want to use the manifest contained in this"
              + " method's output instead of this one.")
  public SkylarkDict<NativeProvider<?>, NativeInfo> mergeResources(
      SkylarkRuleContext ctx,
      AndroidManifestInfo manifest,
      SkylarkList<ConfiguredTarget> resources,
      SkylarkList<AndroidResourcesInfo> deps,
      boolean neverlink,
      boolean enableDataBinding)
      throws EvalException, InterruptedException {
    // Collect the FileProviders backing this target's own resource targets; targets that carry no
    // FileProvider are silently skipped.
    ImmutableList<FileProvider> fileProviders =
        resources
            .stream()
            .map(target -> target.getProvider(FileProvider.class))
            .filter(Objects::nonNull)
            .collect(ImmutableList.toImmutableList());
    try {
      AndroidAaptVersion aaptVersion =
          AndroidCommon.getAndroidConfig(ctx.getRuleContext()).getAndroidAaptVersion();
      // Local pipeline: parse -> merge with dependency resources -> validate against the manifest.
      ValidatedAndroidResources validated =
          AndroidResources.from(ctx.getRuleContext(), fileProviders, "resources")
              .parse(
                  ctx.getRuleContext(),
                  manifest.asStampedManifest(),
                  enableDataBinding,
                  aaptVersion)
              .merge(
                  ctx.getRuleContext(),
                  ResourceDependencies.fromProviders(deps, neverlink),
                  enableDataBinding,
                  aaptVersion)
              .validate(ctx.getRuleContext(), aaptVersion);
      // Expose the generated R class jar on the compilation classpath; marked neverlink so it is
      // only used at compile time.
      JavaInfo javaInfo =
          JavaInfo.Builder.create()
              .setNeverlink(true)
              .addProvider(
                  JavaCompilationInfoProvider.class,
                  new JavaCompilationInfoProvider.Builder()
                      .setCompilationClasspath(
                          NestedSetBuilder.create(Order.NAIVE_LINK_ORDER, validated.getClassJar()))
                      .build())
              .build();
      return SkylarkDict.of(
          /* env = */ null,
          AndroidResourcesInfo.PROVIDER,
          validated.toProvider(),
          JavaInfo.PROVIDER,
          javaInfo);
    } catch (RuleErrorException e) {
      // Surface rule errors to Skylark callers as EvalExceptions.
      throw new EvalException(Location.BUILTIN, e);
    }
  }
  /**
   * Skylark API for building an Aar for an android_library
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "make_aar",
      mandatoryPositionals = 4, // context, resource info, asset info, and library class jar
      parameters = {
        @Param(
            name = "proguard_specs",
            type = SkylarkList.class,
            generic1 = ConfiguredTarget.class,
            defaultValue = "[]",
            positional = false,
            named = true,
            doc =
                "Files to be used as Proguard specification for this target, which will be"
                    + " inherited in the top-level target"),
        @Param(
            name = "deps",
            type = SkylarkList.class,
            generic1 = AndroidLibraryAarInfo.class,
            defaultValue = "[]",
            positional = false,
            named = true,
            doc = "Dependant AAR providers used to build this AAR."),
        @Param(
            name = "neverlink",
            type = Boolean.class,
            defaultValue = "False",
            positional = false,
            named = true,
            doc =
                "Defaults to False. If true, this target's Aar will not be generated or propagated"
                    + " to targets that depend upon it."),
      },
      doc =
          "Builds an AAR and corresponding provider for this target. The resource and asset"
              + " providers from this same target must both be passed, as must the class JAR output"
              + " of building the Android Java library.")
  public AndroidLibraryAarInfo makeAar(
      SkylarkRuleContext ctx,
      AndroidResourcesInfo resourcesInfo,
      AndroidAssetsInfo assetsInfo,
      Artifact libraryClassJar,
      SkylarkList<ConfiguredTarget> proguardSpecs,
      SkylarkList<AndroidLibraryAarInfo> deps,
      boolean neverlink)
      throws EvalException, InterruptedException {
    if (neverlink) {
      // Neverlinked targets propagate no Aar at all: return an empty provider.
      return AndroidLibraryAarInfo.create(
          null,
          NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER),
          NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER));
    }
    // Get the target's local resources, if defined, from the provider
    boolean definesLocalResources = resourcesInfo.getDirectAndroidResources().isSingleton();
    AndroidResources resources = AndroidResources.empty();
    if (definesLocalResources) {
      ValidatedAndroidData validatedAndroidData =
          resourcesInfo.getDirectAndroidResources().toList().get(0);
      // If the single direct entry belongs to a different label, it did not originate in this
      // target; treat that as "no local resources".
      if (validatedAndroidData.getLabel().equals(ctx.getLabel())) {
        // TODO(b/77574966): Remove this cast once we get rid of ResourceContainer and can guarantee
        // that only properly processed resources are passed into this object.
        if (!(validatedAndroidData instanceof ValidatedAndroidResources)) {
          throw new EvalException(
              Location.BUILTIN, "Old data processing pipeline does not support the Skylark API");
        }
        resources = (ValidatedAndroidResources) validatedAndroidData;
      } else {
        definesLocalResources = false;
      }
    }
    // Get the target's local assets, if defined, from the provider
    boolean definesLocalAssets = assetsInfo.getDirectParsedAssets().isSingleton();
    AndroidAssets assets = AndroidAssets.empty();
    if (definesLocalAssets) {
      ParsedAndroidAssets parsed = assetsInfo.getDirectParsedAssets().toList().get(0);
      // Same ownership check as for resources above.
      if (parsed.getLabel().equals(ctx.getLabel())) {
        assets = parsed;
      } else {
        definesLocalAssets = false;
      }
    }
    // Local assets and resources must be defined together or not at all.
    if (definesLocalResources != definesLocalAssets) {
      throw new EvalException(
          Location.BUILTIN,
          "Must define either both or none of assets and resources. Use the merge_assets and"
              + " merge_resources methods to define them, or assets_from_deps and"
              + " resources_from_deps to inherit without defining them.");
    }
    // Flatten the proguard spec targets into their underlying artifacts; targets without a
    // FileProvider contribute nothing.
    ImmutableList.Builder<Artifact> proguardSpecBuilder = ImmutableList.builder();
    for (ConfiguredTarget target : proguardSpecs) {
      FileProvider fileProvider = target.getProvider(FileProvider.class);
      if (fileProvider != null) {
        proguardSpecBuilder.addAll(fileProvider.getFilesToBuild());
      }
    }
    return Aar.makeAar(
            ctx.getRuleContext(),
            resources,
            assets,
            resourcesInfo.getManifest(),
            resourcesInfo.getRTxt(),
            libraryClassJar,
            proguardSpecBuilder.build())
        .toProvider(deps, definesLocalResources);
  }
  /**
   * Skylark API for doing all resource, asset, and manifest processing for an android_library
   *
   * <p>Orchestrates {@link #stampAndroidManifest}, {@link #mergeResources}, {@link #mergeAssets},
   * and {@link #makeAar}, returning a dict from provider type to provider instance.
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "process_library_data",
      mandatoryPositionals = 2, // ctx and libraryClassJar are required
      parameters = {
        @Param(
            name = "manifest",
            positional = false,
            type = Artifact.class,
            defaultValue = "None",
            named = true,
            noneable = true,
            doc =
                "If passed, the manifest to use for this target. Otherwise, a dummy manifest will"
                    + " be generated."),
        @Param(
            name = "resources",
            positional = false,
            defaultValue = "None",
            type = SkylarkList.class,
            generic1 = FileProvider.class,
            named = true,
            noneable = true,
            doc = "Providers of this target's resources"),
        @Param(
            name = "assets",
            positional = false,
            defaultValue = "None",
            type = SkylarkList.class,
            generic1 = ConfiguredTarget.class,
            noneable = true,
            named = true,
            doc =
                "Targets containing raw assets for this target. If passed, 'assets_dir' must also"
                    + " be passed."),
        @Param(
            name = "assets_dir",
            positional = false,
            defaultValue = "None",
            type = String.class,
            noneable = true,
            named = true,
            doc =
                "Directory the assets are contained in. Must be passed if and only if 'assets' is"
                    + " passed. This path will be split off of the asset paths on the device."),
        @Param(
            name = "exports_manifest",
            positional = false,
            defaultValue = "None",
            type = Boolean.class,
            named = true,
            noneable = true,
            doc =
                "Defaults to False. If passed as True, this manifest will be exported to and"
                    + " eventually merged into targets that depend on it. Otherwise, it won't be"
                    + " inherited."),
        @Param(
            name = "custom_package",
            positional = false,
            defaultValue = "None",
            type = String.class,
            noneable = true,
            named = true,
            doc =
                "The Android application package to stamp the manifest with. If not provided, the"
                    + " current Java package, derived from the location of this target's BUILD"
                    + " file, will be used. For example, given a BUILD file in"
                    + " 'java/com/foo/bar/BUILD', the package would be 'com.foo.bar'."),
        @Param(
            name = "neverlink",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If passed as True, these resources and assets will not be"
                    + " inherited by targets that depend on this one."),
        @Param(
            name = "enable_data_binding",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If True, processes data binding expressions in layout"
                    + " resources."),
        @Param(
            name = "proguard_specs",
            type = SkylarkList.class,
            generic1 = ConfiguredTarget.class,
            defaultValue = "[]",
            positional = false,
            named = true,
            doc =
                "Files to be used as Proguard specification for this target, which will be"
                    + " inherited in the top-level target"),
        @Param(
            name = "deps",
            positional = false,
            defaultValue = "[]",
            type = SkylarkList.class,
            generic1 = AndroidAssetsInfo.class,
            named = true,
            doc =
                "Dependency targets. Providers will be extracted from these dependencies for each"
                    + " type of data."),
      },
      doc =
          "Performs full processing of data for android_library or similar rules. Returns a dict"
              + " from provider type to providers for the target.")
  public SkylarkDict<NativeProvider<?>, NativeInfo> processLibraryData(
      SkylarkRuleContext ctx,
      Artifact libraryClassJar,
      Object manifest,
      Object resources,
      Object assets,
      Object assetsDir,
      Object exportsManifest,
      Object customPackage,
      boolean neverlink,
      boolean enableDataBinding,
      SkylarkList<ConfiguredTarget> proguardSpecs,
      SkylarkList<ConfiguredTarget> deps)
      throws InterruptedException, EvalException {
    // Extract each data type's providers from the generic dependency list.
    SkylarkList<AndroidResourcesInfo> resourceDeps =
        getProviders(deps, AndroidResourcesInfo.PROVIDER);
    SkylarkList<AndroidAssetsInfo> assetDeps = getProviders(deps, AndroidAssetsInfo.PROVIDER);
    ImmutableMap.Builder<NativeProvider<?>, NativeInfo> infoBuilder = ImmutableMap.builder();
    AndroidResourcesInfo resourcesInfo;
    AndroidAssetsInfo assetsInfo;
    if (isNone(manifest)
        && isNone(resources)
        && isNone(assets)
        && isNone(assetsDir)
        && isNone(exportsManifest)) {
      // If none of these parameters were specified, for backwards compatibility, do not trigger
      // data processing.
      resourcesInfo = resourcesFromDeps(ctx, resourceDeps, neverlink, customPackage);
      assetsInfo = assetsFromDeps(ctx, assetDeps, neverlink);
      infoBuilder.put(AndroidResourcesInfo.PROVIDER, resourcesInfo);
    } else {
      // Full processing: stamp the manifest, then merge local resources and assets with deps.
      AndroidManifestInfo baseManifest =
          stampAndroidManifest(
              ctx,
              manifest,
              customPackage,
              fromNoneableOrDefault(exportsManifest, Boolean.class, false));
      SkylarkDict<NativeProvider<?>, NativeInfo> resourceOutput =
          mergeResources(
              ctx,
              baseManifest,
              listFromNoneableOrEmpty(resources, ConfiguredTarget.class),
              resourceDeps,
              neverlink,
              enableDataBinding);
      resourcesInfo = (AndroidResourcesInfo) resourceOutput.get(AndroidResourcesInfo.PROVIDER);
      assetsInfo = mergeAssets(ctx, assets, assetsDir, assetDeps, neverlink);
      infoBuilder.putAll(resourceOutput);
    }
    AndroidLibraryAarInfo aarInfo =
        makeAar(
            ctx,
            resourcesInfo,
            assetsInfo,
            libraryClassJar,
            proguardSpecs,
            getProviders(deps, AndroidLibraryAarInfo.PROVIDER),
            neverlink);
    // Only expose the aar provider in non-neverlinked actions
    if (!neverlink) {
      infoBuilder.put(AndroidLibraryAarInfo.PROVIDER, aarInfo);
    }
    // Expose the updated manifest that was changed by resource processing
    // TODO(b/30817309): Use the base manifest once manifests are no longer changed in resource
    // processing
    AndroidManifestInfo manifestInfo = resourcesInfo.getManifest().toProvider();
    return SkylarkDict.copyOf(
        /* env = */ null,
        infoBuilder
            .put(AndroidAssetsInfo.PROVIDER, assetsInfo)
            .put(AndroidManifestInfo.PROVIDER, manifestInfo)
            .build());
  }
/** Checks if a "Noneable" object passed by Skylark is "None", which Java should treat as null. */
private static boolean isNone(Object object) {
return object == Runtime.NONE;
}
/**
* Converts a "Noneable" Object passed by Skylark to an nullable object of the appropriate type.
*
* <p>Skylark "Noneable" types are passed in as an Object that may be either the correct type or a
* Runtime.NONE object. Skylark will handle type checking, based on the appropriate @param
* annotation, but we still need to do the actual cast (or conversion to null) ourselves.
*
* @param object the Noneable object
* @param clazz the correct class, as defined in the @Param annotation
* @param <T> the type to cast to
* @return {@code null}, if the noneable argument was None, or the cast object, otherwise.
*/
@Nullable
private static <T> T fromNoneable(Object object, Class<T> clazz) {
if (isNone(object)) {
return null;
}
return clazz.cast(object);
}
private static <T> T fromNoneableOrDefault(Object object, Class<T> clazz, T defaultValue) {
T value = fromNoneable(object, clazz);
if (value == null) {
return defaultValue;
}
return value;
}
/**
* Converts a "Noneable" Object passed by Skylark to a List of the appropriate type.
*
* <p>This first calls {@link #fromNoneable(Object, Class)} to get a SkylarkList<?>, then safely
* casts it to a list with the appropriate generic.
*/
@Nullable
private static <T> List<T> listFromNoneable(Object object, Class<T> clazz) throws EvalException {
SkylarkList<?> asList = fromNoneable(object, SkylarkList.class);
if (asList == null) {
return null;
}
return SkylarkList.castList(asList, clazz, null);
}
private static <T> SkylarkList<T> listFromNoneableOrEmpty(Object object, Class<T> clazz)
throws EvalException {
List<T> value = listFromNoneable(object, clazz);
if (value == null) {
return SkylarkList.createImmutable(ImmutableList.of());
}
return SkylarkList.createImmutable(value);
}
private static <T extends NativeInfo> SkylarkList<T> getProviders(
SkylarkList<ConfiguredTarget> targets, NativeProvider<T> provider) {
return SkylarkList.createImmutable(
targets
.stream()
.map(target -> target.get(provider))
.filter(Objects::nonNull)
.collect(ImmutableList.toImmutableList()));
}
}
| src/main/java/com/google/devtools/build/lib/rules/android/AndroidSkylarkData.java | // Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.skylark.SkylarkRuleContext;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.Location;
import com.google.devtools.build.lib.packages.NativeInfo;
import com.google.devtools.build.lib.packages.NativeProvider;
import com.google.devtools.build.lib.packages.RuleClass.ConfiguredTargetFactory.RuleErrorException;
import com.google.devtools.build.lib.rules.android.AndroidConfiguration.AndroidAaptVersion;
import com.google.devtools.build.lib.rules.android.AndroidLibraryAarInfo.Aar;
import com.google.devtools.build.lib.rules.java.JavaCompilationInfoProvider;
import com.google.devtools.build.lib.rules.java.JavaInfo;
import com.google.devtools.build.lib.skylarkinterface.Param;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.Runtime;
import com.google.devtools.build.lib.syntax.SkylarkDict;
import com.google.devtools.build.lib.syntax.SkylarkList;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.List;
import java.util.Objects;
import javax.annotation.Nullable;
/** Skylark-visible methods for working with Android data (manifests, resources, and assets). */
@SkylarkModule(
name = "android_data",
doc =
"Utilities for working with Android data (manifests, resources, and assets). "
+ "This API is non-final and subject to change without warning; do not rely on it.")
public class AndroidSkylarkData {
/**
* Skylark API for getting a asset provider for android_library targets that don't specify assets.
*
* <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
*
* @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
* instead. See b/79159379
*/
@SkylarkCallable(
name = "assets_from_deps",
mandatoryPositionals = 1, // context
parameters = {
@Param(
name = "deps",
defaultValue = "[]",
type = SkylarkList.class,
generic1 = AndroidAssetsInfo.class,
positional = false,
named = true,
doc = "Dependencies to inherit assets from"),
@Param(
name = "neverlink",
defaultValue = "False",
type = Boolean.class,
positional = false,
named = true,
doc =
"Defaults to False. If true, assets will not be exposed to targets that depend on"
+ " them.")
},
doc =
"Creates an AndroidAssetsInfo from this target's asset dependencies, ignoring local"
+ " assets. No processing will be done. This method is deprecated and exposed only"
+ " for backwards-compatibility with existing Native behavior.")
public static AndroidAssetsInfo assetsFromDeps(
SkylarkRuleContext ctx, SkylarkList<AndroidAssetsInfo> deps, boolean neverlink)
throws EvalException {
return AssetDependencies.fromProviders(deps, neverlink).toInfo(ctx.getLabel());
}
  /**
   * Skylark API for getting a resource provider for android_library targets that don't specify
   * resources.
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "resources_from_deps",
      mandatoryPositionals = 1, // context
      parameters = {
        @Param(
            name = "deps",
            defaultValue = "[]",
            type = SkylarkList.class,
            generic1 = AndroidResourcesInfo.class,
            positional = false,
            named = true,
            doc = "Dependencies to inherit resources from"),
        @Param(
            name = "neverlink",
            defaultValue = "False",
            type = Boolean.class,
            positional = false,
            named = true,
            doc =
                "Defaults to False. If true, resources will not be exposed to targets that depend"
                    + " on them."),
        @Param(
            name = "custom_package",
            positional = false,
            defaultValue = "None",
            type = String.class,
            noneable = true,
            named = true,
            doc =
                "The Android application package to stamp the manifest with. If not provided, the"
                    + " current Java package, derived from the location of this target's BUILD"
                    + " file, will be used. For example, given a BUILD file in"
                    + " 'java/com/foo/bar/BUILD', the package would be 'com.foo.bar'."),
      },
      doc =
          "Creates an AndroidResourcesInfo from this target's resource dependencies, ignoring local"
              + " resources. Only processing of deps will be done. This method is deprecated and"
              + " exposed only for backwards-compatibility with existing Native behavior. An empty"
              + " manifest will be generated and included in the provider - this path should not"
              + " be used when an explicit manifest is specified.")
  public static AndroidResourcesInfo resourcesFromDeps(
      SkylarkRuleContext ctx,
      SkylarkList<AndroidResourcesInfo> deps,
      boolean neverlink,
      Object customPackage)
      throws EvalException, InterruptedException {
    // Resolve the package: explicit custom_package wins, else derive the default from the rule.
    String pkg = fromNoneable(customPackage, String.class);
    if (pkg == null) {
      pkg = AndroidManifest.getDefaultPackage(ctx.getRuleContext());
    }
    // Process transitive data only; local assets and resources are deliberately absent, and an
    // empty manifest stamped with the package is generated for the provider.
    return ResourceApk.processFromTransitiveLibraryData(
            ctx.getRuleContext(),
            ResourceDependencies.fromProviders(deps, /* neverlink = */ neverlink),
            AssetDependencies.empty(),
            StampedAndroidManifest.createEmpty(ctx.getRuleContext(), pkg, /* exported = */ false))
        .toResourceInfo(ctx.getLabel());
  }
  /**
   * Skylark API for stamping an Android manifest
   *
   * <p>If no manifest is supplied, an empty stamped manifest is synthesized instead.
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "stamp_manifest",
      mandatoryPositionals = 1, // SkylarkRuleContext ctx is mandatory
      parameters = {
        @Param(
            name = "manifest",
            positional = false,
            defaultValue = "None",
            type = Artifact.class,
            noneable = true,
            named = true,
            doc = "The manifest to stamp. If not passed, a dummy manifest will be generated"),
        @Param(
            name = "custom_package",
            positional = false,
            defaultValue = "None",
            type = String.class,
            noneable = true,
            named = true,
            doc =
                "The Android application package to stamp the manifest with. If not provided, the"
                    + " current Java package, derived from the location of this target's BUILD"
                    + " file, will be used. For example, given a BUILD file in"
                    + " 'java/com/foo/bar/BUILD', the package would be 'com.foo.bar'."),
        @Param(
            name = "exports_manifest",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If passed as True, this manifest will be exported to and"
                    + " eventually merged into targets that depend on it. Otherwise, it won't be"
                    + " inherited."),
      },
      doc = "Stamps a manifest with package information.")
  public AndroidManifestInfo stampAndroidManifest(
      SkylarkRuleContext ctx, Object manifest, Object customPackage, boolean exported) {
    // Resolve the package: explicit custom_package wins, else derive the default from the rule.
    String pkg = fromNoneable(customPackage, String.class);
    if (pkg == null) {
      pkg = AndroidManifest.getDefaultPackage(ctx.getRuleContext());
    }
    Artifact primaryManifest = fromNoneable(manifest, Artifact.class);
    if (primaryManifest == null) {
      // No manifest given: synthesize an empty stamped manifest carrying the package.
      return StampedAndroidManifest.createEmpty(ctx.getRuleContext(), pkg, exported).toProvider();
    }
    return new AndroidManifest(primaryManifest, pkg, exported)
        .stamp(ctx.getRuleContext())
        .toProvider();
  }
  /**
   * Skylark API for merging android_library assets
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "merge_assets",
      mandatoryPositionals = 1, // context
      parameters = {
        @Param(
            name = "assets",
            positional = false,
            defaultValue = "None",
            type = SkylarkList.class,
            generic1 = ConfiguredTarget.class,
            noneable = true,
            named = true,
            doc =
                "Targets containing raw assets for this target. If passed, 'assets_dir' must also"
                    + " be passed."),
        @Param(
            name = "assets_dir",
            positional = false,
            defaultValue = "None",
            type = String.class,
            noneable = true,
            named = true,
            doc =
                "Directory the assets are contained in. Must be passed if and only if 'assets' is"
                    + " passed. This path will be split off of the asset paths on the device."),
        @Param(
            name = "deps",
            positional = false,
            defaultValue = "[]",
            type = SkylarkList.class,
            generic1 = AndroidAssetsInfo.class,
            named = true,
            doc =
                "Providers containing assets from dependencies. These assets will be merged"
                    + " together with each other and this target's assets."),
        @Param(
            name = "neverlink",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If passed as True, these assets will not be inherited by"
                    + " targets that depend on this one.")
      },
      doc =
          "Merges this target's assets together with assets inherited from dependencies. Note that,"
              + " by default, actions for validating the merge are created but may not be called."
              + " You may want to force these actions to be called - see the 'validation_result'"
              + " field in AndroidAssetsInfo")
  public AndroidAssetsInfo mergeAssets(
      SkylarkRuleContext ctx,
      Object assets,
      Object assetsDir,
      SkylarkList<AndroidAssetsInfo> deps,
      boolean neverlink)
      throws EvalException, InterruptedException {
    try {
      // Pipeline: gather local assets -> parse -> merge with dependency assets -> wrap as provider.
      // A None assets_dir maps to a null PathFragment.
      return AndroidAssets.from(
              ctx.getRuleContext(),
              listFromNoneable(assets, ConfiguredTarget.class),
              isNone(assetsDir) ? null : PathFragment.create(fromNoneable(assetsDir, String.class)))
          .parse(ctx.getRuleContext())
          .merge(
              ctx.getRuleContext(),
              AssetDependencies.fromProviders(deps.getImmutableList(), neverlink))
          .toProvider();
    } catch (RuleErrorException e) {
      // Surface rule errors to Skylark callers as EvalExceptions.
      throw new EvalException(Location.BUILTIN, e);
    }
  }
  /**
   * Skylark API for merging android_library resources
   *
   * <p>Parses this target's resources, merges them with resource providers from {@code deps}, and
   * validates the result against the stamped manifest. Returns both an AndroidResourcesInfo and a
   * JavaInfo wrapping the generated R class jar.
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "merge_resources",
      mandatoryPositionals = 2, // context, manifest
      parameters = {
        @Param(
            name = "resources",
            positional = false,
            defaultValue = "[]",
            type = SkylarkList.class,
            generic1 = FileProvider.class,
            named = true,
            doc = "Providers of this target's resources"),
        @Param(
            name = "deps",
            positional = false,
            defaultValue = "[]",
            type = SkylarkList.class,
            generic1 = AndroidResourcesInfo.class,
            named = true,
            doc =
                "Targets containing raw resources from dependencies. These resources will be merged"
                    + " together with each other and this target's resources."),
        @Param(
            name = "neverlink",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If passed as True, these resources will not be inherited by"
                    + " targets that depend on this one."),
        @Param(
            name = "enable_data_binding",
            positional = false,
            defaultValue = "False",
            type = Boolean.class,
            named = true,
            doc =
                "Defaults to False. If True, processes data binding expressions in layout"
                    + " resources."),
      },
      doc =
          "Merges this target's resources together with resources inherited from dependencies."
              + " Returns a dict of provider type to actual info, with elements for"
              + " AndroidResourcesInfo (various resource information) and JavaInfo (wrapping the"
              + " R.class jar, for use in Java compilation). The passed manifest provider is used"
              + " to get Android package information and to validate that all resources it refers"
              + " to are available. Note that this method might do additional processing to this"
              + " manifest, so in the future, you may want to use the manifest contained in this"
              + " method's output instead of this one.")
  public SkylarkDict<NativeProvider<?>, NativeInfo> mergeResources(
      SkylarkRuleContext ctx,
      AndroidManifestInfo manifest,
      SkylarkList<ConfiguredTarget> resources,
      SkylarkList<AndroidResourcesInfo> deps,
      boolean neverlink,
      boolean enableDataBinding)
      throws EvalException, InterruptedException {
    // Collect the FileProviders backing this target's own resource targets; targets that carry no
    // FileProvider are silently skipped.
    ImmutableList<FileProvider> fileProviders =
        resources
            .stream()
            .map(target -> target.getProvider(FileProvider.class))
            .filter(Objects::nonNull)
            .collect(ImmutableList.toImmutableList());
    try {
      AndroidAaptVersion aaptVersion =
          AndroidCommon.getAndroidConfig(ctx.getRuleContext()).getAndroidAaptVersion();
      // Local pipeline: parse -> merge with dependency resources -> validate against the manifest.
      ValidatedAndroidResources validated =
          AndroidResources.from(ctx.getRuleContext(), fileProviders, "resources")
              .parse(
                  ctx.getRuleContext(),
                  manifest.asStampedManifest(),
                  enableDataBinding,
                  aaptVersion)
              .merge(
                  ctx.getRuleContext(),
                  ResourceDependencies.fromProviders(deps, neverlink),
                  enableDataBinding,
                  aaptVersion)
              .validate(ctx.getRuleContext(), aaptVersion);
      // Expose the generated R class jar on the compilation classpath; marked neverlink so it is
      // only used at compile time.
      JavaInfo javaInfo =
          JavaInfo.Builder.create()
              .setNeverlink(true)
              .addProvider(
                  JavaCompilationInfoProvider.class,
                  new JavaCompilationInfoProvider.Builder()
                      .setCompilationClasspath(
                          NestedSetBuilder.create(Order.NAIVE_LINK_ORDER, validated.getClassJar()))
                      .build())
              .build();
      return SkylarkDict.of(
          /* env = */ null,
          AndroidResourcesInfo.PROVIDER,
          validated.toProvider(),
          JavaInfo.PROVIDER,
          javaInfo);
    } catch (RuleErrorException e) {
      // Surface rule errors to Skylark callers as EvalExceptions.
      throw new EvalException(Location.BUILTIN, e);
    }
  }
  /**
   * Skylark API for building an Aar for an android_library
   *
   * <p>TODO(b/79159379): Stop passing SkylarkRuleContext here
   *
   * @param ctx the SkylarkRuleContext. We will soon change to using an ActionConstructionContext
   *     instead. See b/79159379
   */
  @SkylarkCallable(
      name = "make_aar",
      mandatoryPositionals = 4, // context, resource info, asset info, and library class jar
      parameters = {
        @Param(
            name = "proguard_specs",
            type = SkylarkList.class,
            generic1 = ConfiguredTarget.class,
            defaultValue = "[]",
            positional = false,
            named = true,
            doc =
                "Files to be used as Proguard specification for this target, which will be"
                    + " inherited in the top-level target"),
        @Param(
            name = "deps",
            type = SkylarkList.class,
            generic1 = AndroidLibraryAarInfo.class,
            defaultValue = "[]",
            positional = false,
            named = true,
            doc = "Dependant AAR providers used to build this AAR."),
        @Param(
            name = "neverlink",
            type = Boolean.class,
            defaultValue = "False",
            positional = false,
            named = true,
            doc =
                "Defaults to False. If true, this target's Aar will not be generated or propagated"
                    + " to targets that depend upon it."),
      },
      doc =
          "Builds an AAR and corresponding provider for this target. The resource and asset"
              + " providers from this same target must both be passed, as must the class JAR output"
              + " of building the Android Java library.")
  public AndroidLibraryAarInfo makeAar(
      SkylarkRuleContext ctx,
      AndroidResourcesInfo resourcesInfo,
      AndroidAssetsInfo assetsInfo,
      Artifact libraryClassJar,
      SkylarkList<ConfiguredTarget> proguardSpecs,
      SkylarkList<AndroidLibraryAarInfo> deps,
      boolean neverlink)
      throws EvalException, InterruptedException {
    // Neverlink targets produce no AAR and propagate nothing to dependents.
    if (neverlink) {
      return AndroidLibraryAarInfo.create(
          null,
          NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER),
          NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER));
    }
    // Get the target's local resources, if defined, from the provider
    boolean definesLocalResources = resourcesInfo.getDirectAndroidResources().isSingleton();
    AndroidResources resources = AndroidResources.empty();
    if (definesLocalResources) {
      ValidatedAndroidData validatedAndroidData =
          resourcesInfo.getDirectAndroidResources().toList().get(0);
      // Resources only count as "local" if they were defined by this very target;
      // otherwise they were merely inherited from a dependency.
      if (validatedAndroidData.getLabel().equals(ctx.getLabel())) {
        // TODO(b/77574966): Remove this cast once we get rid of ResourceContainer and can guarantee
        // that only properly processed resources are passed into this object.
        if (!(validatedAndroidData instanceof ValidatedAndroidResources)) {
          throw new EvalException(
              Location.BUILTIN, "Old data processing pipeline does not support the Skylark API");
        }
        resources = (ValidatedAndroidResources) validatedAndroidData;
      } else {
        definesLocalResources = false;
      }
    }
    // Get the target's local assets, if defined, from the provider
    boolean definesLocalAssets = assetsInfo.getDirectParsedAssets().isSingleton();
    AndroidAssets assets = AndroidAssets.empty();
    if (definesLocalAssets) {
      ParsedAndroidAssets parsed = assetsInfo.getDirectParsedAssets().toList().get(0);
      // Same "defined by this target" check as for resources above.
      if (parsed.getLabel().equals(ctx.getLabel())) {
        assets = parsed;
      } else {
        definesLocalAssets = false;
      }
    }
    // Assets and resources must be defined together (or both inherited).
    if (definesLocalResources != definesLocalAssets) {
      throw new EvalException(
          Location.BUILTIN,
          "Must define either both or none of assets and resources. Use the merge_assets and"
              + " merge_resources methods to define them, or assets_from_deps and"
              + " resources_from_deps to inherit without defining them.");
    }
    // Flatten the Proguard spec targets into their underlying files.
    ImmutableList.Builder<Artifact> proguardSpecBuilder = ImmutableList.builder();
    for (ConfiguredTarget target : proguardSpecs) {
      FileProvider fileProvider = target.getProvider(FileProvider.class);
      if (fileProvider != null) {
        proguardSpecBuilder.addAll(fileProvider.getFilesToBuild());
      }
    }
    // Build the AAR and wrap it in a provider together with the inherited AARs.
    return Aar.makeAar(
            ctx.getRuleContext(),
            resources,
            assets,
            resourcesInfo.getManifest(),
            resourcesInfo.getRTxt(),
            libraryClassJar,
            proguardSpecBuilder.build())
        .toProvider(deps, definesLocalResources);
  }
/** Checks if a "Noneable" object passed by Skylark is "None", which Java should treat as null. */
private static boolean isNone(Object object) {
return object == Runtime.NONE;
}
/**
* Converts a "Noneable" Object passed by Skylark to an nullable object of the appropriate type.
*
* <p>Skylark "Noneable" types are passed in as an Object that may be either the correct type or a
* Runtime.NONE object. Skylark will handle type checking, based on the appropriate @param
* annotation, but we still need to do the actual cast (or conversion to null) ourselves.
*
* @param object the Noneable object
* @param clazz the correct class, as defined in the @Param annotation
* @param <T> the type to cast to
* @return {@code null}, if the noneable argument was None, or the cast object, otherwise.
*/
@Nullable
private static <T> T fromNoneable(Object object, Class<T> clazz) {
if (isNone(object)) {
return null;
}
return clazz.cast(object);
}
/**
* Converts a "Noneable" Object passed by Skylark to a List of the appropriate type.
*
* <p>This first calls {@link #fromNoneable(Object, Class)} to get a SkylarkList<?>, then safely
* casts it to a list with the appropriate generic.
*/
@Nullable
private static <T> List<T> listFromNoneable(Object object, Class<T> clazz) throws EvalException {
SkylarkList<?> asList = fromNoneable(object, SkylarkList.class);
if (asList == null) {
return null;
}
return SkylarkList.castList(asList, clazz, null);
}
}
| Expose single method for doing all android_library data processing in Skylark
RELNOTES: none
PiperOrigin-RevId: 195977060
| src/main/java/com/google/devtools/build/lib/rules/android/AndroidSkylarkData.java | Expose single method for doing all android_library data processing in Skylark |
|
Java | apache-2.0 | 4f1e4942335ca9035d4ab1491b849a2bc20f700e | 0 | tetrapods/core,tetrapods/core,tetrapods/core,tetrapods/core,tetrapods/core | package io.tetrapod.core.storage;
import io.netty.channel.socket.SocketChannel;
import io.tetrapod.core.*;
import io.tetrapod.core.registry.Registry;
import io.tetrapod.core.rpc.*;
import io.tetrapod.core.utils.Util;
import io.tetrapod.protocol.core.*;
import java.net.ConnectException;
import java.util.concurrent.TimeUnit;
import org.slf4j.*;
/**
* Represents another Tetrapod in the cluster. This maintains a persistent connection with that tetrapod and transmits RPC for Raft
* consensus
*/
public class TetrapodPeer implements Session.Listener, SessionFactory {
   public static final Logger logger = LoggerFactory.getLogger(TetrapodPeer.class);
   // the local service that owns this peer connection
   public final TetrapodService service;
   // entityId of the remote tetrapod
   public final int entityId;
   // peer id derived from the entityId (see constructor)
   public final int peerId;
   // host / cluster port used to (re)establish the connection
   public final String host;
   public final int clusterPort;
   protected int servicePort;
   // active session to the peer, if any; guarded by "this"
   private Session session;
   // consecutive connect failures; used to throttle connect() log noise
   private int failures;
   // guards against concurrent connect attempts; guarded by "this"
   private boolean pendingConnect;
   // true once a ClusterJoinRequest was sent on the current session;
   // reset in onSessionStop so a reconnect triggers a fresh join
   private boolean joined = false;
   public String uuid = null;
   public TetrapodPeer(TetrapodService service, int entityId, String host, int clusterPort, int servicePort) {
      this.service = service;
      this.entityId = entityId;
      this.host = host;
      this.clusterPort = clusterPort;
      this.servicePort = servicePort;
      this.peerId = entityId >> Registry.PARENT_ID_SHIFT;
      // kick off the initial connection attempt immediately
      scheduleReconnect(0);
   }
   // true if we currently have a live session to this peer
   public synchronized boolean isConnected() {
      return session != null && session.isConnected();
   }
   // Adopts the given session as our connection to this peer and, unless the
   // peer is this tetrapod itself, starts the cluster join handshake.
   protected synchronized void setSession(Session ses) {
      this.failures = 0;
      this.session = ses;
      this.session.setMyEntityId(service.getEntityId());
      this.session.setTheirEntityId(entityId);
      this.session.addSessionListener(this);
      if (!joined && entityId != service.getEntityId()) {
         joinCluster();
      }
   }
   public synchronized Session getSession() {
      return session;
   }
   /**
    * Session factory for our sessions to cluster
    */
   @Override
   public Session makeSession(SocketChannel ch) {
      final Session ses = new WireSession(ch, service);
      ses.setRelayHandler(service);
      ses.setMyEntityId(service.getEntityId());
      ses.setMyEntityType(Core.TYPE_TETRAPOD);
      ses.setTheirEntityType(Core.TYPE_TETRAPOD);
      return ses;
   }
   // Attempts to establish a connection unless we are shutting down, already
   // connected, not yet registered ourselves, or the peer is us. Failures only
   // bump a counter; scheduleReconnect() retries later.
   public void connect() {
      try {
         // note: we briefly sync to make sure we don't try at the same time as another thread,
         // but we can't hold the lock while calling sync() on the connect() call below
         synchronized (this) {
            if (pendingConnect) {
               return;
            }
            pendingConnect = true;
         }
         if (!service.isShuttingDown() && !isConnected() && service.getEntityId() != 0 && service.getEntityId() != entityId) {
            if (failures < 10) {
               // only log the first few attempts to avoid spamming the log
               logger.info(" - Joining Tetrapod {} @ {} : {}", entityId, host, clusterPort);
            }
            final Client client = new Client(this);
            client.connect(host, clusterPort, service.getDispatcher()).sync();
            setSession(client.getSession());
         }
      } catch (Throwable e) {
         if (!(e instanceof ConnectException)) {
            logger.error(e.getMessage(), e);
         }
         ++failures;
      } finally {
         synchronized (this) {
            pendingConnect = false;
         }
      }
   }
   private synchronized void scheduleReconnect(int delayInSeconds) {
      if (!service.isShuttingDown()) {
         service.getDispatcher().dispatch(delayInSeconds, TimeUnit.SECONDS, new Runnable() {
            public void run() {
               connect();
            }
         });
      }
   }
   @Override
   public synchronized void onSessionStop(Session ses) {
      // reset joined so the next successful reconnect re-sends the ClusterJoinRequest
      joined = false;
      service.onEntityDisconnected(ses);
      scheduleReconnect(1);
   }
   @Override
   public synchronized void onSessionStart(final Session ses) {}
   @Override
   public String toString() {
      return String.format("pod[0x%08X @ %s:%d,%d]", entityId, host, servicePort, clusterPort);
   }
   // Sends a ClusterJoinRequest on the current session. The joined flag is set
   // optimistically and cleared again if the request fails.
   private synchronized void joinCluster() {
      joined = true;
      session.sendRequest(
            new ClusterJoinRequest(service.buildNumber, service.getStatus(), Util.getHostName(), service.getEntityId(),
                  service.getServicePort(), service.getClusterPort()), Core.DIRECT).handle(new ResponseHandler() {
         @Override
         public void onResponse(Response res) {
            if (res.isError()) {
               logger.error("ClusterJoinRequest Failed {}", res);
               synchronized (TetrapodPeer.this) {
                  joined = false;
               }
            } else {
               logger.info("ClusterJoinRequest Succeeded");
            }
         }
      });
   }
}
| Tetrapod-Tetrapod/src/io/tetrapod/core/storage/TetrapodPeer.java | package io.tetrapod.core.storage;
import io.netty.channel.socket.SocketChannel;
import io.tetrapod.core.*;
import io.tetrapod.core.registry.Registry;
import io.tetrapod.core.rpc.*;
import io.tetrapod.core.utils.Util;
import io.tetrapod.protocol.core.*;
import java.net.ConnectException;
import java.util.concurrent.TimeUnit;
import org.slf4j.*;
/**
* Represents another Tetrapod in the cluster. This maintains a persistent connection with that tetrapod and transmits RPC for Raft
* consensus
*/
public class TetrapodPeer implements Session.Listener, SessionFactory {
   public static final Logger logger = LoggerFactory.getLogger(TetrapodPeer.class);
   // the local service that owns this peer connection
   public final TetrapodService service;
   // entityId of the remote tetrapod
   public final int entityId;
   // peer id derived from the entityId (see constructor)
   public final int peerId;
   // host / cluster port used to (re)establish the connection
   public final String host;
   public final int clusterPort;
   protected int servicePort;
   // active session to the peer, if any; guarded by "this"
   private Session session;
   // consecutive connect failures; used to throttle connect() log noise
   private int failures;
   // guards against concurrent connect attempts; guarded by "this"
   private boolean pendingConnect;
   // true once a ClusterJoinRequest was sent on the current session;
   // must be reset when the session dies so a reconnect re-joins the cluster
   private boolean joined = false;
   public String uuid = null;
   public TetrapodPeer(TetrapodService service, int entityId, String host, int clusterPort, int servicePort) {
      this.service = service;
      this.entityId = entityId;
      this.host = host;
      this.clusterPort = clusterPort;
      this.servicePort = servicePort;
      this.peerId = entityId >> Registry.PARENT_ID_SHIFT;
      // kick off the initial connection attempt immediately
      scheduleReconnect(0);
   }
   // true if we currently have a live session to this peer
   public synchronized boolean isConnected() {
      return session != null && session.isConnected();
   }
   // Adopts the given session as our connection to this peer and, unless the
   // peer is this tetrapod itself, starts the cluster join handshake.
   protected synchronized void setSession(Session ses) {
      this.failures = 0;
      this.session = ses;
      this.session.setMyEntityId(service.getEntityId());
      this.session.setTheirEntityId(entityId);
      this.session.addSessionListener(this);
      if (!joined && entityId != service.getEntityId()) {
         joinCluster();
      }
   }
   public synchronized Session getSession() {
      return session;
   }
   /**
    * Session factory for our sessions to cluster
    */
   @Override
   public Session makeSession(SocketChannel ch) {
      final Session ses = new WireSession(ch, service);
      ses.setRelayHandler(service);
      ses.setMyEntityId(service.getEntityId());
      ses.setMyEntityType(Core.TYPE_TETRAPOD);
      ses.setTheirEntityType(Core.TYPE_TETRAPOD);
      return ses;
   }
   // Attempts to establish a connection unless we are shutting down, already
   // connected, not yet registered ourselves, or the peer is us. Failures only
   // bump a counter; scheduleReconnect() retries later.
   public void connect() {
      try {
         // note: we briefly sync to make sure we don't try at the same time as another thread,
         // but we can't hold the lock while calling sync() on the connect() call below
         synchronized (this) {
            if (pendingConnect) {
               return;
            }
            pendingConnect = true;
         }
         if (!service.isShuttingDown() && !isConnected() && service.getEntityId() != 0 && service.getEntityId() != entityId) {
            if (failures < 10) {
               // only log the first few attempts to avoid spamming the log
               logger.info(" - Joining Tetrapod {} @ {} : {}", entityId, host, clusterPort);
            }
            final Client client = new Client(this);
            client.connect(host, clusterPort, service.getDispatcher()).sync();
            setSession(client.getSession());
         }
      } catch (Throwable e) {
         if (!(e instanceof ConnectException)) {
            logger.error(e.getMessage(), e);
         }
         ++failures;
      } finally {
         synchronized (this) {
            pendingConnect = false;
         }
      }
   }
   private synchronized void scheduleReconnect(int delayInSeconds) {
      if (!service.isShuttingDown()) {
         service.getDispatcher().dispatch(delayInSeconds, TimeUnit.SECONDS, new Runnable() {
            public void run() {
               connect();
            }
         });
      }
   }
   @Override
   public synchronized void onSessionStop(Session ses) {
      // FIX: reset the joined flag so the next successful reconnect re-sends the
      // ClusterJoinRequest; without this, setSession() skips joinCluster() after a
      // disconnect and the registries are never re-synced.
      joined = false;
      service.onEntityDisconnected(ses);
      scheduleReconnect(1);
   }
   @Override
   public synchronized void onSessionStart(final Session ses) {}
   @Override
   public String toString() {
      return String.format("pod[0x%08X @ %s:%d,%d]", entityId, host, servicePort, clusterPort);
   }
   // Sends a ClusterJoinRequest on the current session. The joined flag is set
   // optimistically and cleared again if the request fails.
   private synchronized void joinCluster() {
      joined = true;
      session.sendRequest(
            new ClusterJoinRequest(service.buildNumber, service.getStatus(), Util.getHostName(), service.getEntityId(),
                  service.getServicePort(), service.getClusterPort()), Core.DIRECT).handle(new ResponseHandler() {
         @Override
         public void onResponse(Response res) {
            if (res.isError()) {
               logger.error("ClusterJoinRequest Failed {}", res);
               synchronized (TetrapodPeer.this) {
                  // allow a retry on the next setSession()
                  joined = false;
               }
            } else {
               logger.info("ClusterJoinRequest Succeeded");
            }
         }
      });
   }
}
| Reset joined flag when a tetrapod peer session disconnects so that we actually re-sync registries later :derp:
| Tetrapod-Tetrapod/src/io/tetrapod/core/storage/TetrapodPeer.java | Reset joined flag when a tetrapod peer session disconnects so that we actually re-sync registries later :derp: |
|
Java | apache-2.0 | 3674d4868f1cdfe05cbc269ea4e2a7014db47902 | 0 | bartosz-grabski/jackrabbit,tripodsan/jackrabbit,sdmcraft/jackrabbit,afilimonov/jackrabbit,SylvesterAbreu/jackrabbit,afilimonov/jackrabbit,Overseas-Student-Living/jackrabbit,afilimonov/jackrabbit,Kast0rTr0y/jackrabbit,kigsmtua/jackrabbit,Overseas-Student-Living/jackrabbit,SylvesterAbreu/jackrabbit,bartosz-grabski/jackrabbit,Overseas-Student-Living/jackrabbit,kigsmtua/jackrabbit,tripodsan/jackrabbit,sdmcraft/jackrabbit,SylvesterAbreu/jackrabbit,kigsmtua/jackrabbit,sdmcraft/jackrabbit,Kast0rTr0y/jackrabbit,bartosz-grabski/jackrabbit,tripodsan/jackrabbit,Kast0rTr0y/jackrabbit | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.jcr2spi.lock;
import org.apache.jackrabbit.jcr2spi.ItemManager;
import org.apache.jackrabbit.jcr2spi.SessionListener;
import org.apache.jackrabbit.jcr2spi.WorkspaceManager;
import org.apache.jackrabbit.jcr2spi.config.CacheBehaviour;
import org.apache.jackrabbit.jcr2spi.hierarchy.NodeEntry;
import org.apache.jackrabbit.jcr2spi.hierarchy.HierarchyEntry;
import org.apache.jackrabbit.jcr2spi.operation.Operation;
import org.apache.jackrabbit.jcr2spi.operation.LockOperation;
import org.apache.jackrabbit.jcr2spi.operation.LockRelease;
import org.apache.jackrabbit.jcr2spi.operation.LockRefresh;
import org.apache.jackrabbit.jcr2spi.state.NodeState;
import org.apache.jackrabbit.jcr2spi.state.Status;
import org.apache.jackrabbit.jcr2spi.state.ItemStateLifeCycleListener;
import org.apache.jackrabbit.jcr2spi.state.ItemState;
import org.apache.jackrabbit.jcr2spi.state.PropertyState;
import org.apache.jackrabbit.spi.LockInfo;
import org.apache.jackrabbit.spi.NodeId;
import org.apache.jackrabbit.name.QName;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import javax.jcr.lock.Lock;
import javax.jcr.lock.LockException;
import javax.jcr.RepositoryException;
import javax.jcr.Node;
import javax.jcr.Item;
import javax.jcr.Session;
import javax.jcr.ItemNotFoundException;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
/**
 * <code>LockManagerImpl</code>...
 * TODO: TOBEFIXED. Lock objects obtained through this mgr are not informed if another session is or becomes lock-holder and removes the lock again.
 */
public class LockManagerImpl implements LockManager, SessionListener {
    private static Logger log = LoggerFactory.getLogger(LockManagerImpl.class);
    /**
     * WorkspaceManager used to apply and release locks as well as to retrieve
     * Lock information for a given NodeState.
     * NOTE: The workspace manager must not be used as ItemStateManager.
     */
    private final WorkspaceManager wspManager;
    // Used to resolve a NodeState's hierarchy entry to the JCR Node instance
    // (e.g. the lock holding node handed to LockImpl).
    private final ItemManager itemManager;
    // Configured cache behaviour; not referenced by the code visible here —
    // presumably consulted elsewhere in this class (TODO confirm).
    private final CacheBehaviour cacheBehaviour;
    /**
     * Map holding all locks that where created by this <code>Session</code> upon
     * calls to {@link LockManager#lock(NodeState,boolean,boolean)} or to
     * {@link LockManager#getLock(NodeState)}. The map entries are removed
     * only if a lock ends his life by {@link Node#unlock()} or by implicit
     * unlock upon {@link Session#logout()}.
     */
    private final Map lockMap;
    /**
     * Creates a new lock manager for the given workspace.
     *
     * @param wspManager used to execute lock operations and retrieve lock
     *        information; must not be used as ItemStateManager.
     * @param itemManager used to retrieve the lock holding <code>Node</code>.
     * @param cacheBehaviour the session's cache behaviour.
     */
    public LockManagerImpl(WorkspaceManager wspManager, ItemManager itemManager,
                           CacheBehaviour cacheBehaviour) {
        this.wspManager = wspManager;
        this.itemManager = itemManager;
        this.cacheBehaviour = cacheBehaviour;
        // use hard references in order to make sure, that entries refering
        // to locks created by the current session are not removed.
        lockMap = new HashMap();
    }
/**
* @see LockManager#lock(NodeState,boolean,boolean)
*/
public Lock lock(NodeState nodeState, boolean isDeep, boolean isSessionScoped) throws LockException, RepositoryException {
nodeState.checkIsSessionState();
// retrieve node first
Node lhNode;
// NOTE: Node must be retrieved from the given NodeState and not from
// the overlayed workspace nodestate.
Item item = itemManager.getItem(nodeState.getHierarchyEntry());
if (item.isNode()) {
lhNode = (Node) item;
} else {
throw new RepositoryException("Internal error: ItemManager returned Property from NodeState");
}
// execute the operation
LockOperation op = LockOperation.create(nodeState, isDeep, isSessionScoped);
wspManager.execute(op);
Lock lock = new LockImpl(new LockState(nodeState, op.getLockInfo()), lhNode);
return lock;
}
/**
* @see LockManager#unlock(NodeState)
* @param nodeState
*/
public void unlock(NodeState nodeState) throws LockException, RepositoryException {
// execute the operation. Note, that its possible that the session is
// lock holder and still the lock was never accessed. thus the lockMap
// does not provide sufficient and reliable information.
Operation op = LockRelease.create(nodeState);
wspManager.execute(op);
// if unlock was successfull: clean up lock map and lock life cycle
// in case the corresponding Lock object exists (and thus has been
// added to the map.
if (lockMap.containsKey(nodeState)) {
LockImpl l = (LockImpl) lockMap.remove(nodeState);
l.lockState.unlocked();
}
}
/**
* If the session created a lock on the node with the given state, we already
* know the lock. Otherwise, the node state and its ancestores are searched
* for properties indicating a lock.<br>
* Note, that the flag indicating session-scoped lock cannot be retrieved
* unless the current session is the lock holder.
*
* @see LockManager#getLock(NodeState)
* @param nodeState
*/
public Lock getLock(NodeState nodeState) throws LockException, RepositoryException {
LockImpl l = getLockImpl(nodeState, false);
// no-lock found or lock doesn't apply to this state -> throw
if (l == null) {
throw new LockException("Node with id '" + nodeState.getNodeId() + "' is not locked.");
}
// a lock exists either on the given node state or as deep lock inherited
// from any of the ancestor states.
return l;
}
/**
* @see LockManager#isLocked(NodeState)
* @param nodeState
*/
public boolean isLocked(NodeState nodeState) throws RepositoryException {
nodeState.checkIsSessionState();
LockImpl l = getLockImpl(nodeState, false);
return l != null;
}
/**
* @see LockManager#checkLock(NodeState)
* @param nodeState
*/
public void checkLock(NodeState nodeState) throws LockException, RepositoryException {
nodeState.checkIsSessionState();
// shortcut: new status indicates that a new state was already added
// thus, the parent state is not locked by foreign lock.
if (nodeState.getStatus() == Status.NEW) {
return;
}
LockImpl l = getLockImpl(nodeState, true);
if (l != null && l.getLockToken() == null) {
// lock is present and token is null -> session is not lock-holder.
throw new LockException("Node with id '" + nodeState + "' is locked.");
} // else: state is not locked at all || session is lock-holder
}
    /**
     * Returns the lock tokens present on the <code>SessionInfo</code> this
     * manager has been created with.
     *
     * @see LockManager#getLockTokens()
     * @return the lock tokens held by the underlying session.
     */
    public String[] getLockTokens() {
        // pure delegation; tokens are maintained by the WorkspaceManager
        return wspManager.getLockTokens();
    }
    /**
     * Delegates this call to {@link WorkspaceManager#addLockToken(String)}.
     * If this succeeds this method will inform all locks stored in the local
     * map in order to give them the chance to update their lock information.
     *
     * @see LockManager#addLockToken(String)
     * @param lt the lock token to add to the session.
     * @throws LockException if the token cannot be added.
     * @throws RepositoryException if another error occurs.
     */
    public void addLockToken(String lt) throws LockException, RepositoryException {
        wspManager.addLockToken(lt);
        // let all cached locks reload their lock information
        notifyTokenAdded(lt);
    }
/**
* If the lock addressed by the token is session-scoped, this method will
* throw a LockException, such as defined by JSR170 v.1.0.1 for
* {@link Session#removeLockToken(String)}.<br>Otherwise the call is
* delegated to {@link WorkspaceManager#removeLockToken(String)}.
* All locks stored in the local lock map are notified by the removed
* token in order have them updated their lock information.
*
* @see LockManager#removeLockToken(String)
*/
public void removeLockToken(String lt) throws LockException, RepositoryException {
// JSR170 v. 1.0.1 defines that the token of a session-scoped lock may
// not be moved over to another session. thus removal ist not possible
// and the lock is always present in the lock map.
Iterator it = lockMap.values().iterator();
boolean found = false;
// loop over cached locks to determine if the token belongs to a session
// scoped lock, in which case the removal must fail immediately.
while (it.hasNext() && !found) {
LockImpl l = (LockImpl) it.next();
if (lt.equals(l.getLockToken())) {
// break as soon as the lock associated with the given token was found.
found = true;
if (l.isSessionScoped()) {
throw new LockException("Cannot remove lock token associated with a session scoped lock.");
}
}
}
// remove lock token from sessionInfo. call will fail, if the session
// is not lock holder.
wspManager.removeLockToken(lt);
// inform about this lt being removed from this session
notifyTokenRemoved(lt);
}
//----------------------------------------------------< SessionListener >---
    /**
     * Unlocks all session-scoped locks held by this session before logout.
     *
     * @param session
     * @see SessionListener#loggingOut(Session)
     */
    public void loggingOut(Session session) {
        // remove any session scoped locks:
        // iterate over an array snapshot of the keys, since unlock() removes
        // entries from the lockMap while we iterate
        NodeState[] lhStates = (NodeState[]) lockMap.keySet().toArray(new NodeState[lockMap.size()]);
        for (int i = 0; i < lhStates.length; i++) {
            NodeState nState = lhStates[i];
            LockImpl l = (LockImpl) lockMap.get(nState);
            // only release session-scoped locks this session actually holds
            // (token present)
            if (l.isSessionScoped() && l.getLockToken() != null) {
                try {
                    unlock(nState);
                } catch (RepositoryException e) {
                    log.error("Error while unlocking session scoped lock. Cleaning up local lock status.");
                    // at least clean up local lock map and the locks life cycle
                    l.lockState.unlocked();
                }
            }
        }
    }
/**
*
* @param session
* @see SessionListener#loggedOut(Session)
*/
public void loggedOut(Session session) {
// release all remaining locks without modifying their lock status
LockImpl[] locks = (LockImpl[]) lockMap.values().toArray(new LockImpl[lockMap.size()]);
for (int i = 0; i < locks.length; i++) {
locks[i].lockState.release();
}
}
//------------------------------------------------------------< private >---
/**
* Search nearest ancestor that is locked. Returns <code>null</code> if neither
* the given state nor any of its ancestors is locked.
* Note, that this methods does NOT check if the given node state would
* be affected by the lock present on an ancestor state.
* Note, that in certain cases it might not be possible to detect a lock
* being present due to the fact that the hierarchy might be imcomplete or
* not even readable completely. For this reason it seem equally reasonable
* to search for jcr:lockIsDeep property only and omitting all kind of
* verification regarding nodetypes present.
*
* @param nodeState <code>NodeState</code> from which searching starts.
* Note, that the given state must not have an overlayed state.
* @return a state holding a lock or <code>null</code> if neither the
* given state nor any of its ancestors is locked.
*/
private NodeState getLockHoldingState(NodeState nodeState) {
NodeEntry entry = nodeState.getNodeEntry();
while (!entry.hasPropertyEntry(QName.JCR_LOCKISDEEP)) {
NodeEntry parent = entry.getParent();
if (parent == null) {
// reached root state without finding a locked node
return null;
}
entry = parent;
}
try {
return entry.getNodeState();
} catch (RepositoryException e) {
// may occur if the nodeState is not accessible or some generic
// error occured.
// for this case, assume that no lock exists and delegate final
// validation to the spi-implementation.
log.warn("Error while accessing lock holding NodeState", e);
return null;
}
}
    /**
     * Retrieves the <code>LockInfo</code> for the given node state from the
     * server and combines it with the state that actually holds the lock
     * (either the given state itself or the ancestor the server reports).
     *
     * @param nodeState the state to obtain lock information for.
     * @return a new <code>LockState</code>, or <code>null</code> if the server
     * reports that no lock is present or the lock holding state cannot be
     * determined.
     * @throws RepositoryException if resolving the lock holding state fails.
     */
    private LockState buildLockState(NodeState nodeState) throws RepositoryException {
        NodeId nId = nodeState.getNodeId();
        NodeState lockHoldingState = null;
        LockInfo lockInfo;
        try {
            lockInfo = wspManager.getLockInfo(nId);
        } catch (LockException e) {
            // no lock present
            return null;
        }
        NodeId lockNodeId = lockInfo.getNodeId();
        if (lockNodeId.equals(nId)) {
            // the given state holds the lock itself
            lockHoldingState = nodeState;
        } else {
            // lock is inherited: resolve the ancestor state that holds it
            HierarchyEntry lockedEntry = wspManager.getHierarchyManager().getHierarchyEntry(lockNodeId);
            if (lockedEntry.denotesNode()) {
                try {
                    lockHoldingState = ((NodeEntry) lockedEntry).getNodeState();
                } catch (RepositoryException e) {
                    log.warn("Cannot build LockState");
                    throw new RepositoryException("Cannot build LockState", e);
                }
            } else {
                // should never occur
                throw new RepositoryException("Internal error: NodeId points to a Property.");
            }
        }
        if (lockHoldingState == null) {
            return null;
        } else {
            return new LockState(lockHoldingState, lockInfo);
        }
    }
    /**
     * Returns the Lock that applies to the given node state (directly or
     * by an inherited deep lock) or <code>null</code> if the state is not
     * locked at all.
     *
     * @param nodeState
     * @param lazyLockDiscovery If true, no extra check with the server is made in order to
     * determine, whether there is really no lock present. Otherwise, the server
     * is asked if a lock is present.
     * @return LockImpl that applies to the given state or <code>null</code>.
     * @throws RepositoryException
     */
    private LockImpl getLockImpl(NodeState nodeState, boolean lazyLockDiscovery) throws RepositoryException {
        nodeState.checkIsSessionState();
        NodeState nState = nodeState;
        // access first non-NEW state
        while (nState.getStatus() == Status.NEW) {
            nState = nState.getParent();
        }
        // shortcut: check if a given state holds a lock, which has been
        // store in the lock map. see below (LockImpl) for the conditions that
        // must be met in order a lock can be stored.
        LockImpl l = getLockFromMap(nState);
        if (l != null) {
            return l;
        }
        LockState lState;
        if (lazyLockDiscovery) {
            // try to retrieve a state (ev. a parent state) that holds a lock.
            NodeState lockHoldingState = getLockHoldingState(nState);
            if (lockHoldingState == null) {
                // assume no lock is present (might not be correct due to incomplete hierarchy)
                return null;
            } else {
                // check lockMap again with the lockholding state
                // NOTE(review): the comment above says "lockholding state" but the
                // code passes nState, which was already looked up a few lines
                // earlier — possibly lockHoldingState was intended. TODO confirm.
                l = getLockFromMap(nState);
                if (l != null) {
                    return l;
                }
                lState = buildLockState(lockHoldingState);
            }
        } else {
            // need correct information about lock status -> retrieve lockInfo
            // from the persistent layer.
            lState = buildLockState(nState);
        }
        if (lState != null) {
            // Test again if a Lock object is stored in the lockmap. Otherwise
            // build the lock object and retrieve lock holding node. note that this
            // may fail if the session does not have permission to see this node.
            LockImpl lock = getLockFromMap(lState.lockHoldingState);
            if (lock != null) {
                // refresh the cached lock with the info just fetched
                lock.lockState.lockInfo = lState.lockInfo;
            } else {
                Item lockHoldingNode = itemManager.getItem(lState.lockHoldingState.getHierarchyEntry());
                lock = new LockImpl(lState, (Node)lockHoldingNode);
            }
            // test if lock applies to the original nodestate
            if (lState.appliesToNodeState(nodeState)) {
                return lock;
            } else {
                return null; // lock exists but doesn't apply to the given state
            }
        } else {
            // no lock at all
            return null;
        }
    }
private LockImpl getLockFromMap(NodeState nodeState) {
try {
LockImpl l = (LockImpl) lockMap.get(nodeState);
if (l != null && l.isLive()) {
return l;
}
} catch (RepositoryException e) {
// ignore
}
return null;
}
//----------------------------< Notification about modified lock-tokens >---
/**
* Notify all <code>Lock</code>s that have been accessed so far about the
* new lock token present on the session and allow them to reload their
* lock info.
*
* @param lt
* @throws LockException
* @throws RepositoryException
*/
private void notifyTokenAdded(String lt) throws LockException, RepositoryException {
LockTokenListener[] listeners = (LockTokenListener[]) lockMap.values().toArray(new LockTokenListener[lockMap.size()]);
for (int i = 0; i < listeners.length; i++) {
listeners[i].lockTokenAdded(lt);
}
}
/**
 * Notify all <code>Lock</code>s that have been accessed so far about the
 * removed lock token and allow them to reload their lock info, if necessary.
 *
 * @param lt the lock token that was removed from the session.
 * @throws LockException
 * @throws RepositoryException
 */
private void notifyTokenRemoved(String lt) throws LockException, RepositoryException {
    // iterate over a snapshot: a listener may modify the lock map while reacting.
    Object[] listeners = lockMap.values().toArray();
    for (int i = 0; i < listeners.length; i++) {
        ((LockTokenListener) listeners[i]).lockTokenRemoved(lt);
    }
}
//--------------------------------------------------------------------------
/**
 * Internal representation of the state of a lock: the node state holding
 * the lock, the {@link LockInfo} retrieved from the server (which may get
 * outdated) and a flag recording whether the lock is still alive. When the
 * cache behaviour is OBSERVATION it also acts as
 * <code>ItemStateLifeCycleListener</code> on the jcr:lockIsDeep property in
 * order to detect external unlocking.
 */
private class LockState implements ItemStateLifeCycleListener {

    // node state holding the lock; always a session state (asserted in ctor)
    private final NodeState lockHoldingState;
    // lock information from the server; replaced on reloadLockInfo()
    private LockInfo lockInfo;
    // false once unlocked; a lock never becomes live again afterwards
    private boolean isLive = true;

    private LockState(NodeState lockHoldingState, LockInfo lockInfo) {
        // lock handling is only defined on session states
        lockHoldingState.checkIsSessionState();
        this.lockHoldingState = lockHoldingState;
        this.lockInfo = lockInfo;
    }

    /**
     * Refreshes the lock on the server by executing a refresh-lock
     * operation for the lock holding state.
     *
     * @throws RepositoryException if executing the operation fails.
     */
    private void refresh() throws RepositoryException {
        // lock is still alive -> send refresh-lock operation.
        Operation op = LockRefresh.create(lockHoldingState);
        wspManager.execute(op);
    }

    /**
     * Returns true, if the given node state is the lockholding state of
     * this Lock object OR if this Lock is deep.
     * Note, that in the latter case this method does not assert, that the
     * given node state is a child state of the lockholding state.
     *
     * @param nodeState that must be the same or a child of the lock holding
     * state stored within this lock object.
     * @return true if this lock applies to the given node state.
     */
    private boolean appliesToNodeState(NodeState nodeState) {
        if (nodeState.getStatus() == Status.NEW) {
            // a NEW state can only be affected through a deep lock above it
            return lockInfo.isDeep();
        } else {
            if (lockHoldingState == nodeState) {
                return true;
            } else {
                return lockInfo.isDeep();
            }
        }
    }

    /**
     * Reload the lockInfo from the server.
     *
     * @throws LockException
     * @throws RepositoryException
     */
    private void reloadLockInfo() throws LockException, RepositoryException {
        lockInfo = wspManager.getLockInfo(lockHoldingState.getNodeId());
    }

    /**
     * Release this lock by removing it from the lock map and unregistering
     * it from event listening.
     */
    private void release() {
        if (lockMap.containsKey(lockHoldingState)) {
            lockMap.remove(lockHoldingState);
        }
        stopListening();
    }

    /**
     * This lock has been removed by the current Session or by an external
     * unlock request. Since a lock will never come back to life after
     * unlocking, it is released and its status is reset accordingly.
     */
    private void unlocked() {
        if (isLive) {
            isLive = false;
            release();
        }
    }

    /**
     * Starts watching the jcr:lockIsDeep property of the lock holding
     * state so that removal of the lock by another session can be noticed.
     * Only effective if the cache behaviour is OBSERVATION.
     */
    private void startListening() {
        if (cacheBehaviour == CacheBehaviour.OBSERVATION) {
            try {
                PropertyState ps = lockHoldingState.getPropertyState(QName.JCR_LOCKISDEEP);
                ps.addListener(this);
            } catch (RepositoryException e) {
                log.warn("Internal error", e);
            }
        }
    }

    /**
     * Counterpart of {@link #startListening()}: unregisters this state
     * from the jcr:lockIsDeep property, if that property still exists.
     */
    private void stopListening() {
        if (cacheBehaviour == CacheBehaviour.OBSERVATION) {
            try {
                if (lockHoldingState.hasPropertyName(QName.JCR_LOCKISDEEP)) {
                    PropertyState ps = lockHoldingState.getPropertyState(QName.JCR_LOCKISDEEP);
                    ps.removeListener(this);
                }
            } catch (ItemNotFoundException e) {
                // the property is already gone -> nothing to unregister
                log.debug("jcr:isDeep doesn't exist any more.");
            } catch (Exception e) {
                log.warn(e.getMessage());
            }
        }
    }

    //-------------------------------------< ItemStateLifeCycleListener >---
    /**
     * @see ItemStateLifeCycleListener#statusChanged(ItemState, int)
     */
    public void statusChanged(ItemState state, int previousStatus) {
        if (!isLive) {
            // since we only monitor the removal of the lock (by means of
            // deletion of the jcr:lockIsDeep property), we are not interested
            // if the lock is not active any more.
            return;
        }
        switch (state.getStatus()) {
            case Status.REMOVED:
                // this lock has been released by someone else (and not by
                // a call to LockManager#unlock) -> clean up and set isLive
                // flag to false.
                unlocked();
                break;
            default:
                // not interested
        }
    }
}
//---------------------------------------------------------------< Lock >---
/**
 * Inner class implementing the {@link Lock} interface. Also listens for
 * lock token changes on the session in order to keep its lock information
 * up to date.
 */
private class LockImpl implements Lock, LockTokenListener {

    // shared lock state (lock holding node state, lock info, liveness)
    private final LockState lockState;
    // the lock holding node exposed through getNode()
    private final Node node;
    // true if the cached lock info may be stale and must be refetched
    // before answering token/liveness questions
    private boolean reloadInfo = false; // TODO: find better solution

    /**
     *
     * @param lockState
     * Note, that the given state must not have an overlayed state.
     * @param lockHoldingNode the lock holding <code>Node</code> itself.
     */
    public LockImpl(LockState lockState, Node lockHoldingNode) {
        this.lockState = lockState;
        this.node = lockHoldingNode;
        // if observation is supported OR if this is a session-scoped lock
        // held by this session -> store lock in the map
        if (cacheBehaviour == CacheBehaviour.OBSERVATION) {
            lockMap.put(lockState.lockHoldingState, this);
            lockState.startListening();
        } else if (isHoldBySession()) {
            lockMap.put(lockState.lockHoldingState, this);
            // open-scoped locks: the map entry and the lock information
            // stored therein may become outdated if the token is transferred
            // to another session -> info must be reloaded.
            if (!isSessionScoped()) {
                reloadInfo = true;
            }
        } else {
            // foreign lock: info must be reloaded.
            reloadInfo = true;
        }
    }

    /**
     * @see Lock#getLockOwner()
     */
    public String getLockOwner() {
        return getLockInfo().getOwner();
    }

    /**
     * @see Lock#isDeep()
     */
    public boolean isDeep() {
        return getLockInfo().isDeep();
    }

    /**
     * @see Lock#getNode()
     */
    public Node getNode() {
        return node;
    }

    /**
     * Returns the lock token or <code>null</code> if this session is not
     * lock holder. The lock info is refetched first, if it may be stale.
     *
     * @see Lock#getLockToken()
     */
    public String getLockToken() {
        updateLockInfo();
        return getLockInfo().getLockToken();
    }

    /**
     * Returns the liveness flag after making sure the lock info is up to
     * date (an external unlock marks the lock dead during the update).
     *
     * @see Lock#isLive()
     */
    public boolean isLive() throws RepositoryException {
        updateLockInfo();
        return lockState.isLive;
    }

    /**
     * @see Lock#isSessionScoped()
     */
    public boolean isSessionScoped() {
        return getLockInfo().isSessionScoped();
    }

    /**
     * @see Lock#refresh()
     */
    public void refresh() throws LockException, RepositoryException {
        if (!isLive()) {
            throw new LockException("Lock is not alive any more.");
        }
        if (getLockToken() == null) {
            // shortcut, since lock is always updated if the session became
            // lock-holder of a foreign lock.
            throw new LockException("Session does not hold lock.");
        } else {
            lockState.refresh();
        }
    }

    //----------------------------------------------< LockTokenListener >---
    /**
     * A lock token has been added to the current Session. If this Lock
     * object is not yet held by the Session (thus does not know whether
     * the new lock token belongs to it), it must reload the LockInfo
     * from the server.
     *
     * @param lockToken the token that was added.
     * @throws LockException
     * @throws RepositoryException
     * @see LockTokenListener#lockTokenAdded(String)
     */
    public void lockTokenAdded(String lockToken) throws LockException, RepositoryException {
        if (getLockToken() == null) {
            // could be that this affects this lock and session became
            // lock holder -> reload info to assert.
            lockState.reloadLockInfo();
        }
    }

    /**
     * A lock token has been removed from the current Session; reload the
     * lock info if it was the token of this lock.
     *
     * @param lockToken the token that was removed.
     * @throws LockException
     * @throws RepositoryException
     * @see LockTokenListener#lockTokenRemoved(String)
     */
    public void lockTokenRemoved(String lockToken) throws LockException, RepositoryException {
        // reload lock info, if session gave away its lock-holder status
        // for this lock.
        if (lockToken.equals(getLockToken())) {
            lockState.reloadLockInfo();
        }
    }

    //--------------------------------------------------------< private >---
    /**
     * @return <code>LockInfo</code> stored within the <code>LockState</code>
     */
    private LockInfo getLockInfo() {
        return lockState.lockInfo;
    }

    /**
     * Make sure the lock info is really up to date: if the info may be
     * stale, refetch it from the server. A LockException during the reload
     * means the lock is gone and it gets marked unlocked.
     * TODO: find better solution.
     */
    private void updateLockInfo() {
        if (reloadInfo) {
            try {
                lockState.reloadLockInfo();
            } catch (LockException e) {
                // lock does not exist any more on the server
                lockState.unlocked();
            } catch (RepositoryException e) {
                log.error("Internal error", e);
            }
        } // else: nothing to do.
    }

    /**
     * @return true if this lock is held by this session (i.e. the session
     * knows the lock token). false otherwise.
     */
    private boolean isHoldBySession() {
        return lockState.lockInfo.getLockToken() != null;
    }
}
//--------------------------------------------------< LockTokenListener >---
/**
 * Internal listener interface: implemented by {@link LockImpl} in order to
 * be informed whenever a lock token is added to or removed from the current
 * session, so that the lock can reload its lock information if the change
 * may concern it.
 */
private interface LockTokenListener {

    /**
     * Called after the given lock token has been added to the session.
     *
     * @param lockToken the token that was added.
     * @throws LockException
     * @throws RepositoryException
     */
    void lockTokenAdded(String lockToken) throws LockException, RepositoryException;

    /**
     * Called after the given lock token has been removed from the session.
     *
     * @param lockToken the token that was removed.
     * @throws LockException
     * @throws RepositoryException
     */
    void lockTokenRemoved(String lockToken) throws LockException, RepositoryException;
}
}
| contrib/spi/jcr2spi/src/main/java/org/apache/jackrabbit/jcr2spi/lock/LockManagerImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.jcr2spi.lock;
import org.apache.jackrabbit.jcr2spi.ItemManager;
import org.apache.jackrabbit.jcr2spi.SessionListener;
import org.apache.jackrabbit.jcr2spi.WorkspaceManager;
import org.apache.jackrabbit.jcr2spi.config.CacheBehaviour;
import org.apache.jackrabbit.jcr2spi.hierarchy.NodeEntry;
import org.apache.jackrabbit.jcr2spi.hierarchy.HierarchyEntry;
import org.apache.jackrabbit.jcr2spi.operation.Operation;
import org.apache.jackrabbit.jcr2spi.operation.LockOperation;
import org.apache.jackrabbit.jcr2spi.operation.LockRelease;
import org.apache.jackrabbit.jcr2spi.operation.LockRefresh;
import org.apache.jackrabbit.jcr2spi.state.NodeState;
import org.apache.jackrabbit.jcr2spi.state.Status;
import org.apache.jackrabbit.jcr2spi.state.ItemStateLifeCycleListener;
import org.apache.jackrabbit.jcr2spi.state.ItemState;
import org.apache.jackrabbit.jcr2spi.state.PropertyState;
import org.apache.jackrabbit.spi.LockInfo;
import org.apache.jackrabbit.spi.NodeId;
import org.apache.jackrabbit.name.QName;
import org.slf4j.LoggerFactory;
import org.slf4j.Logger;
import javax.jcr.lock.Lock;
import javax.jcr.lock.LockException;
import javax.jcr.RepositoryException;
import javax.jcr.Node;
import javax.jcr.Item;
import javax.jcr.Session;
import java.util.Iterator;
import java.util.Map;
import java.util.HashMap;
/**
* <code>LockManagerImpl</code>...
* TODO: TOBEFIXED. Lock objects obtained through this mgr are not informed if another session is or becomes lock-holder and removes the lock again.
*/
public class LockManagerImpl implements LockManager, SessionListener {
private static Logger log = LoggerFactory.getLogger(LockManagerImpl.class);
/**
* WorkspaceManager used to apply and release locks as well as to retrieve
* Lock information for a given NodeState.
* NOTE: The workspace manager must not be used as ItemStateManager.
*/
private final WorkspaceManager wspManager;
private final ItemManager itemManager;
private final CacheBehaviour cacheBehaviour;
/**
* Map holding all locks that where created by this <code>Session</code> upon
* calls to {@link LockManager#lock(NodeState,boolean,boolean)} or to
* {@link LockManager#getLock(NodeState)}. The map entries are removed
* only if a lock ends his life by {@link Node#unlock()} or by implicit
* unlock upon {@link Session#logout()}.
*/
private final Map lockMap;
public LockManagerImpl(WorkspaceManager wspManager, ItemManager itemManager,
CacheBehaviour cacheBehaviour) {
this.wspManager = wspManager;
this.itemManager = itemManager;
this.cacheBehaviour = cacheBehaviour;
// use hard references in order to make sure, that entries refering
// to locks created by the current session are not removed.
lockMap = new HashMap();
}
/**
* @see LockManager#lock(NodeState,boolean,boolean)
*/
public Lock lock(NodeState nodeState, boolean isDeep, boolean isSessionScoped) throws LockException, RepositoryException {
nodeState.checkIsSessionState();
// retrieve node first
Node lhNode;
// NOTE: Node must be retrieved from the given NodeState and not from
// the overlayed workspace nodestate.
Item item = itemManager.getItem(nodeState.getHierarchyEntry());
if (item.isNode()) {
lhNode = (Node) item;
} else {
throw new RepositoryException("Internal error: ItemManager returned Property from NodeState");
}
// execute the operation
LockOperation op = LockOperation.create(nodeState, isDeep, isSessionScoped);
wspManager.execute(op);
Lock lock = new LockImpl(new LockState(nodeState, op.getLockInfo()), lhNode);
return lock;
}
/**
* @see LockManager#unlock(NodeState)
* @param nodeState
*/
public void unlock(NodeState nodeState) throws LockException, RepositoryException {
// execute the operation. Note, that its possible that the session is
// lock holder and still the lock was never accessed. thus the lockMap
// does not provide sufficient and reliable information.
Operation op = LockRelease.create(nodeState);
wspManager.execute(op);
// if unlock was successfull: clean up lock map and lock life cycle
// in case the corresponding Lock object exists (and thus has been
// added to the map.
if (lockMap.containsKey(nodeState)) {
LockImpl l = (LockImpl) lockMap.remove(nodeState);
l.lockState.unlocked();
}
}
/**
* If the session created a lock on the node with the given state, we already
* know the lock. Otherwise, the node state and its ancestores are searched
* for properties indicating a lock.<br>
* Note, that the flag indicating session-scoped lock cannot be retrieved
* unless the current session is the lock holder.
*
* @see LockManager#getLock(NodeState)
* @param nodeState
*/
public Lock getLock(NodeState nodeState) throws LockException, RepositoryException {
LockImpl l = getLockImpl(nodeState, false);
// no-lock found or lock doesn't apply to this state -> throw
if (l == null) {
throw new LockException("Node with id '" + nodeState.getNodeId() + "' is not locked.");
}
// a lock exists either on the given node state or as deep lock inherited
// from any of the ancestor states.
return l;
}
/**
* @see LockManager#isLocked(NodeState)
* @param nodeState
*/
public boolean isLocked(NodeState nodeState) throws RepositoryException {
nodeState.checkIsSessionState();
LockImpl l = getLockImpl(nodeState, false);
return l != null;
}
/**
* @see LockManager#checkLock(NodeState)
* @param nodeState
*/
public void checkLock(NodeState nodeState) throws LockException, RepositoryException {
nodeState.checkIsSessionState();
// shortcut: new status indicates that a new state was already added
// thus, the parent state is not locked by foreign lock.
if (nodeState.getStatus() == Status.NEW) {
return;
}
LockImpl l = getLockImpl(nodeState, true);
if (l != null && l.getLockToken() == null) {
// lock is present and token is null -> session is not lock-holder.
throw new LockException("Node with id '" + nodeState + "' is locked.");
} // else: state is not locked at all || session is lock-holder
}
/**
* Returns the lock tokens present on the <code>SessionInfo</code> this
* manager has been created with.
*
* @see LockManager#getLockTokens()
*/
public String[] getLockTokens() {
return wspManager.getLockTokens();
}
/**
* Delegates this call to {@link WorkspaceManager#addLockToken(String)}.
* If this succeeds this method will inform all locks stored in the local
* map in order to give them the chance to update their lock information.
*
* @see LockManager#addLockToken(String)
*/
public void addLockToken(String lt) throws LockException, RepositoryException {
wspManager.addLockToken(lt);
notifyTokenAdded(lt);
}
/**
* If the lock addressed by the token is session-scoped, this method will
* throw a LockException, such as defined by JSR170 v.1.0.1 for
* {@link Session#removeLockToken(String)}.<br>Otherwise the call is
* delegated to {@link WorkspaceManager#removeLockToken(String)}.
* All locks stored in the local lock map are notified by the removed
* token in order have them updated their lock information.
*
* @see LockManager#removeLockToken(String)
*/
public void removeLockToken(String lt) throws LockException, RepositoryException {
// JSR170 v. 1.0.1 defines that the token of a session-scoped lock may
// not be moved over to another session. thus removal ist not possible
// and the lock is always present in the lock map.
Iterator it = lockMap.values().iterator();
boolean found = false;
// loop over cached locks to determine if the token belongs to a session
// scoped lock, in which case the removal must fail immediately.
while (it.hasNext() && !found) {
LockImpl l = (LockImpl) it.next();
if (lt.equals(l.getLockToken())) {
// break as soon as the lock associated with the given token was found.
found = true;
if (l.isSessionScoped()) {
throw new LockException("Cannot remove lock token associated with a session scoped lock.");
}
}
}
// remove lock token from sessionInfo. call will fail, if the session
// is not lock holder.
wspManager.removeLockToken(lt);
// inform about this lt being removed from this session
notifyTokenRemoved(lt);
}
//----------------------------------------------------< SessionListener >---
/**
*
* @param session
* @see SessionListener#loggingOut(Session)
*/
public void loggingOut(Session session) {
// remove any session scoped locks:
NodeState[] lhStates = (NodeState[]) lockMap.keySet().toArray(new NodeState[lockMap.size()]);
for (int i = 0; i < lhStates.length; i++) {
NodeState nState = lhStates[i];
LockImpl l = (LockImpl) lockMap.get(nState);
if (l.isSessionScoped() && l.getLockToken() != null) {
try {
unlock(nState);
} catch (RepositoryException e) {
log.error("Error while unlocking session scoped lock. Cleaning up local lock status.");
// at least clean up local lock map and the locks life cycle
l.lockState.unlocked();
}
}
}
}
/**
*
* @param session
* @see SessionListener#loggedOut(Session)
*/
public void loggedOut(Session session) {
// release all remaining locks without modifying their lock status
LockImpl[] locks = (LockImpl[]) lockMap.values().toArray(new LockImpl[lockMap.size()]);
for (int i = 0; i < locks.length; i++) {
locks[i].lockState.release();
}
}
//------------------------------------------------------------< private >---
/**
* Search nearest ancestor that is locked. Returns <code>null</code> if neither
* the given state nor any of its ancestors is locked.
* Note, that this methods does NOT check if the given node state would
* be affected by the lock present on an ancestor state.
* Note, that in certain cases it might not be possible to detect a lock
* being present due to the fact that the hierarchy might be imcomplete or
* not even readable completely. For this reason it seem equally reasonable
* to search for jcr:lockIsDeep property only and omitting all kind of
* verification regarding nodetypes present.
*
* @param nodeState <code>NodeState</code> from which searching starts.
* Note, that the given state must not have an overlayed state.
* @return a state holding a lock or <code>null</code> if neither the
* given state nor any of its ancestors is locked.
*/
private NodeState getLockHoldingState(NodeState nodeState) {
NodeEntry entry = nodeState.getNodeEntry();
while (!entry.hasPropertyEntry(QName.JCR_LOCKISDEEP)) {
NodeEntry parent = entry.getParent();
if (parent == null) {
// reached root state without finding a locked node
return null;
}
entry = parent;
}
try {
return entry.getNodeState();
} catch (RepositoryException e) {
// may occur if the nodeState is not accessible or some generic
// error occured.
// for this case, assume that no lock exists and delegate final
// validation to the spi-implementation.
log.warn("Error while accessing lock holding NodeState", e);
return null;
}
}
private LockState buildLockState(NodeState nodeState) throws RepositoryException {
NodeId nId = nodeState.getNodeId();
NodeState lockHoldingState = null;
LockInfo lockInfo;
try {
lockInfo = wspManager.getLockInfo(nId);
} catch (LockException e) {
// no lock present
return null;
}
NodeId lockNodeId = lockInfo.getNodeId();
if (lockNodeId.equals(nId)) {
lockHoldingState = nodeState;
} else {
HierarchyEntry lockedEntry = wspManager.getHierarchyManager().getHierarchyEntry(lockNodeId);
if (lockedEntry.denotesNode()) {
try {
lockHoldingState = ((NodeEntry) lockedEntry).getNodeState();
} catch (RepositoryException e) {
log.warn("Cannot build LockState");
throw new RepositoryException("Cannot build LockState", e);
}
} else {
// should never occur
throw new RepositoryException("Internal error: NodeId points to a Property.");
}
}
if (lockHoldingState == null) {
return null;
} else {
return new LockState(lockHoldingState, lockInfo);
}
}
/**
* Returns the Lock that applies to the given node state (directly or
* by an inherited deep lock) or <code>null</code> if the state is not
* locked at all.
*
* @param nodeState
* @param lazyLockDiscovery If true, no extra check with the server is made in order to
* determine, whether there is really no lock present. Otherwise, the server
* is asked if a lock is present.
* @return LockImpl that applies to the given state or <code>null</code>.
* @throws RepositoryException
*/
private LockImpl getLockImpl(NodeState nodeState, boolean lazyLockDiscovery) throws RepositoryException {
nodeState.checkIsSessionState();
NodeState nState = nodeState;
// access first non-NEW state
while (nState.getStatus() == Status.NEW) {
nState = nState.getParent();
}
// shortcut: check if a given state holds a lock, which has been
// store in the lock map. see below (LockImpl) for the conditions that
// must be met in order a lock can be stored.
if (lockMap.containsKey(nState)) {
return (LockImpl) lockMap.get(nState);
}
LockState lState;
if (lazyLockDiscovery) {
// try to retrieve a state (ev. a parent state) that holds a lock.
NodeState lockHoldingState = getLockHoldingState(nState);
if (lockHoldingState == null) {
// assume no lock is present (might not be correct due to incomplete hierarchy)
return null;
} else {
// check lockMap again with the lockholding state
if (lockMap.containsKey(lockHoldingState)) {
return (LockImpl) lockMap.get(lockHoldingState);
}
lState = buildLockState(lockHoldingState);
}
} else {
// need correct information about lock status -> retrieve lockInfo
// from the persistent layer.
lState = buildLockState(nState);
}
if (lState != null) {
// Test again if a Lock object is stored in the lockmap. Otherwise
// build the lock object and retrieve lock holding node. note that this
// may fail if the session does not have permission to see this node.
LockImpl lock;
if (lockMap.containsKey(lState.lockHoldingState)) {
lock = (LockImpl) lockMap.get(lState.lockHoldingState);
lock.lockState.lockInfo = lState.lockInfo;
} else {
Item lockHoldingNode = itemManager.getItem(lState.lockHoldingState.getHierarchyEntry());
lock = new LockImpl(lState, (Node)lockHoldingNode);
}
// test if lock applies to the original nodestate
if (lState.appliesToNodeState(nodeState)) {
return lock;
} else {
return null; // lock exists but doesn't apply to the given state
}
} else {
// no lock at all
return null;
}
}
//----------------------------< Notification about modified lock-tokens >---
/**
* Notify all <code>Lock</code>s that have been accessed so far about the
* new lock token present on the session and allow them to reload their
* lock info.
*
* @param lt
* @throws LockException
* @throws RepositoryException
*/
private void notifyTokenAdded(String lt) throws LockException, RepositoryException {
LockTokenListener[] listeners = (LockTokenListener[]) lockMap.values().toArray(new LockTokenListener[lockMap.size()]);
for (int i = 0; i < listeners.length; i++) {
listeners[i].lockTokenAdded(lt);
}
}
/**
* Notify all <code>Lock</code>s that have been accessed so far about the
* removed lock token and allow them to reload their lock info, if necessary.
*
* @param lt
* @throws LockException
* @throws RepositoryException
*/
private void notifyTokenRemoved(String lt) throws LockException, RepositoryException {
LockTokenListener[] listeners = (LockTokenListener[]) lockMap.values().toArray(new LockTokenListener[lockMap.size()]);
for (int i = 0; i < listeners.length; i++) {
listeners[i].lockTokenRemoved(lt);
}
}
//--------------------------------------------------------------------------
private class LockState implements ItemStateLifeCycleListener {
private final NodeState lockHoldingState;
private LockInfo lockInfo;
private boolean isLive = true;
private LockState(NodeState lockHoldingState, LockInfo lockInfo) {
lockHoldingState.checkIsSessionState();
this.lockHoldingState = lockHoldingState;
this.lockInfo = lockInfo;
}
private void refresh() throws RepositoryException {
// lock is still alive -> send refresh-lock operation.
Operation op = LockRefresh.create(lockHoldingState);
wspManager.execute(op);
}
/**
* Returns true, if the given node state is the lockholding state of
* this Lock object OR if this Lock is deep.
* Note, that in the latter case this method does not assert, that the
* given node state is a child state of the lockholding state.
*
* @param nodeState that must be the same or a child of the lock holding
* state stored within this lock object.
* @return true if this lock applies to the given node state.
*/
private boolean appliesToNodeState(NodeState nodeState) {
if (nodeState.getStatus() == Status.NEW) {
return lockInfo.isDeep();
} else {
if (lockHoldingState == nodeState) {
return true;
} else {
return lockInfo.isDeep();
}
}
}
/**
* Reload the lockInfo from the server.
*
* @throws LockException
* @throws RepositoryException
*/
private void reloadLockInfo() throws LockException, RepositoryException {
lockInfo = wspManager.getLockInfo(lockHoldingState.getNodeId());
}
/**
* Release this lock by removing from the lock map and unregistering
* it from event listening
*/
private void release() {
if (lockMap.containsKey(lockHoldingState)) {
lockMap.remove(lockHoldingState);
}
stopListening();
}
/**
* This lock has been removed by the current Session or by an external
* unlock request. Since a lock will never come back to life after
* unlocking, it is released an its status is reset accordingly.
*/
private void unlocked() {
if (isLive) {
isLive = false;
release();
}
}
private void startListening() {
if (cacheBehaviour == CacheBehaviour.OBSERVATION) {
try {
PropertyState ps = lockHoldingState.getPropertyState(QName.JCR_LOCKISDEEP);
ps.addListener(this);
} catch (RepositoryException e) {
log.warn("Internal error", e);
}
}
}
private void stopListening() {
if (cacheBehaviour == CacheBehaviour.OBSERVATION) {
try {
PropertyState ps = lockHoldingState.getPropertyState(QName.JCR_LOCKISDEEP);
ps.removeListener(this);
} catch (RepositoryException e) {
log.warn("Internal error", e);
}
}
}
//-------------------------------------< ItemStateLifeCycleListener >---
public void statusChanged(ItemState state, int previousStatus) {
if (!isLive) {
// since we only monitor the removal of the lock (by means
// of deletion of the jcr:lockIsDeep property, we are not interested
// if the lock is not active any more.
return;
}
switch (state.getStatus()) {
case Status.REMOVED:
// this lock has been release by someone else (and not by
// a call to LockManager#unlock -> clean up and set isLive
// flag to false.
unlocked();
default:
// not interested (Todo correct?)
}
}
}
//---------------------------------------------------------------< Lock >---
/**
* Inner class implementing the {@link Lock} interface.
*/
private class LockImpl implements Lock, LockTokenListener {
private final LockState lockState;
private final Node node;
/**
*
* @param lockState
* Note, that the given state must not have an overlayed state.
* @param lockHoldingNode the lock holding <code>Node</code> itself.
*/
public LockImpl(LockState lockState, Node lockHoldingNode) {
this.lockState = lockState;
this.node = lockHoldingNode;
// if observation is supported OR if this is a session-scoped lock
// holded by this session -> store lock in the map
if (cacheBehaviour == CacheBehaviour.OBSERVATION) {
lockMap.put(lockState.lockHoldingState, this);
lockState.startListening();
} else if (isHoldBySession()) {
// TODO: TOBEFIXED. since another session may become lock-holder for
// an open-scoped lock, the map entry and the lock information
// stored therein may become outdated.
lockMap.put(lockState.lockHoldingState, this);
}
}
/**
* @see Lock#getLockOwner()
*/
public String getLockOwner() {
return getLockInfo().getOwner();
}
/**
* @see Lock#isDeep()
*/
public boolean isDeep() {
return getLockInfo().isDeep();
}
/**
* @see Lock#getNode()
*/
public Node getNode() {
return node;
}
/**
* @see Lock#getLockToken()
*/
public String getLockToken() {
return getLockInfo().getLockToken();
}
/**
* @see Lock#isLive()
*/
public boolean isLive() throws RepositoryException {
return lockState.isLive;
}
/**
* @see Lock#isSessionScoped()
*/
public boolean isSessionScoped() {
return getLockInfo().isSessionScoped();
}
/**
* @see Lock#refresh()
*/
public void refresh() throws LockException, RepositoryException {
if (!isLive()) {
throw new LockException("Lock is not alive any more.");
}
if (getLockToken() == null) {
// shortcut, since lock is always updated if the session became
// lock-holder of a foreign lock.
throw new LockException("Session does not hold lock.");
} else {
lockState.refresh();
}
}
//----------------------------------------------< LockTokenListener >---
/**
* A lock token as been added to the current Session. If this Lock
* object is not yet hold by the Session (thus does not know whether
* the new lock token belongs to it), it must reload the LockInfo
* from the server.
*
* @param lockToken
* @throws LockException
* @throws RepositoryException
* @see LockTokenListener#lockTokenAdded(String)
*/
public void lockTokenAdded(String lockToken) throws LockException, RepositoryException {
if (getLockToken() == null) {
// could be that this affects this lock and session became
// lock holder -> releoad info to assert.
lockState.reloadLockInfo();
}
}
/**
*
* @param lockToken
* @throws LockException
* @throws RepositoryException
* @see LockTokenListener#lockTokenRemoved(String)
*/
public void lockTokenRemoved(String lockToken) throws LockException, RepositoryException {
// reload lock info, if session gave away its lock-holder status
// for this lock.
if (lockToken.equals(getLockToken())) {
lockState.reloadLockInfo();
}
}
//--------------------------------------------------------< private >---
private LockInfo getLockInfo() {
return lockState.lockInfo;
}
private boolean isHoldBySession() {
return lockState.lockInfo.getLockToken() != null;
}
}
//--------------------------------------------------< LockTokenListener >---
/**
*
*/
private interface LockTokenListener {
/**
*
* @param lockToken
* @throws LockException
* @throws RepositoryException
*/
void lockTokenAdded(String lockToken) throws LockException, RepositoryException;
/**
*
* @param lockToken
* @throws LockException
* @throws RepositoryException
*/
void lockTokenRemoved(String lockToken) throws LockException, RepositoryException;
}
}
| workaround for locks that might be unlocked by another session (to be improved)
git-svn-id: 02b679d096242155780e1604e997947d154ee04a@521354 13f79535-47bb-0310-9956-ffa450edef68
| contrib/spi/jcr2spi/src/main/java/org/apache/jackrabbit/jcr2spi/lock/LockManagerImpl.java | workaround for locks that might be unlocked by another session (to be improved) |
|
Java | apache-2.0 | 00137fbd5e8729b925b78f4c0ba5699b12a2f623 | 0 | pgaref/MusicBox | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author [email protected]
*/
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import javazoom.jl.player.Player;
/**
 * Minimal MP3 playback helper based on the JLayer {@link Player}.
 * Playback is blocking and errors are reported to stdout (best effort)
 * rather than thrown, matching the original behaviour.
 */
public class MP3 {

    // path of the MP3 file to play; fixed for the lifetime of this object
    private final String filename;
    // player of the current/last play() call; null until play() is invoked
    private Player player;

    /**
     * Creates a playback helper for the given MP3 file.
     *
     * @param filename path of the MP3 file to play
     */
    public MP3(String filename) {
        this.filename = filename;
    }

    /**
     * Plays the MP3 file from start to finish, blocking until playback
     * ends. Any error is reported to stdout. Unlike the previous version,
     * the player (and through it the underlying input stream) is always
     * closed afterwards, so no file handle is leaked.
     */
    public void play() {
        BufferedInputStream bis = null;
        try {
            bis = new BufferedInputStream(new FileInputStream(filename));
            player = new Player(bis);
            player.play();
        }
        catch (Exception e) {
            System.out.println("Problem playing file " + filename);
            System.out.println(e);
        }
        finally {
            if (player != null) {
                // Player.close() also closes the underlying bitstream/stream.
                player.close();
            } else if (bis != null) {
                // Player construction failed: close the stream ourselves.
                try {
                    bis.close();
                } catch (Exception ignored) {
                    // best-effort cleanup only
                }
            }
        }
    }

    /**
     * Manual test entry point: plays "ImperialMarch.mp3" from the current
     * working directory.
     */
    public static void main(String[] args) {
        MP3 mp3 = new MP3("ImperialMarch.mp3");
        mp3.play();
    }
}
| src/MP3.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
 * Minimal MP3 playback helper built on the JLayer {@code Player}.
 *
 * @author pgaref
 */
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import javazoom.jl.player.Player;

public class MP3 {
    // Path of the MP3 file to play.
    private String filename;
    // Player for the current playback; created anew on each play().
    private Player player;

    // constructor that takes the name of an MP3 file
    public MP3(String filename) {
        this.filename = filename;
    }

    // play the MP3 file
    // Blocks until playback finishes. Errors are reported on stdout rather
    // than thrown (best-effort playback).
    // NOTE(review): the input streams are never closed when Player
    // construction or playback fails — consider try-with-resources.
    public void play() {
        try {
            FileInputStream fis = new FileInputStream(filename);
            BufferedInputStream bis = new BufferedInputStream(fis);
            player = new Player(bis);
            player.play();
        }
        catch (Exception e) {
            System.out.println("Problem playing file " + filename);
            System.out.println(e);
        }
    }

    public static void main(String[] args) {
        //plays 07.mp3 file located at C drive
        // (comment above is stale: this actually plays ImperialMarch.mp3 from
        // the working directory)
        MP3 mp3 = new MP3("ImperialMarch.mp3");
        mp3.play();
    }
}
| Testing jlib | src/MP3.java | Testing jlib |
|
Java | apache-2.0 | 4b55d688a64909f591d9be007e4b1b4db8433e49 | 0 | phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida | package ca.corefacility.bioinformatics.irida.ria.web;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;

import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.NcbiExportSubmissionAdminTableModel;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.NcbiExportSubmissionTableModel;
import ca.corefacility.bioinformatics.irida.ria.web.models.export.NcbiSubmissionModel;
import ca.corefacility.bioinformatics.irida.ria.web.models.tables.TableRequest;
import ca.corefacility.bioinformatics.irida.ria.web.models.tables.TableResponse;
import ca.corefacility.bioinformatics.irida.ria.web.services.UINcbiService;

/**
 * Spring Ajax Controller to handle NCBI requests.
 */
@RestController
@RequestMapping("/ajax/ncbi")
public class NCBIAjaxController {
    // Service layer that performs the actual NCBI export look-ups.
    private final UINcbiService service;

    @Autowired
    public NCBIAjaxController(UINcbiService service) {
        this.service = service;
    }

    /**
     * Get a {@link List} of all NCBI Export Submissions on a Project
     *
     * @param projectId Identifier for a Project
     * @return {@link List} of {@link NcbiExportSubmissionTableModel}
     */
    @RequestMapping("/project/{projectId}/list")
    public ResponseEntity<List<NcbiExportSubmissionTableModel>> getNCBIExportsForProject(@PathVariable Long projectId) {
        return ResponseEntity.ok(service.getNCBIExportsForProject(projectId));
    }

    /**
     * Get the details of a specific NCBI SRA submission
     *
     * @param projectId Identifier for the current project (kept in the URL for
     *                  consistency; the lookup itself only uses {@code exportId})
     * @param exportId  Identifier for the NCBI SRA Submission
     * @return details about the submission
     */
    @GetMapping("/project/{projectId}/details/{exportId}")
    public ResponseEntity<NcbiSubmissionModel> getExportDetails(@PathVariable Long projectId,
            @PathVariable Long exportId) {
        return ResponseEntity.ok(service.getExportDetails(exportId));
    }

    /**
     * Get a paged list of NCBI Export Submission based on the current page information.
     * Restricted to administrators via {@code @PreAuthorize}.
     *
     * @param request {@link TableRequest} containing details about the current page
     * @return {@link TableResponse} of NCBI Export Submissions
     */
    @RequestMapping("/list")
    @PreAuthorize("hasRole('ROLE_ADMIN')")
    public ResponseEntity<TableResponse<NcbiExportSubmissionAdminTableModel>> getNCBIExportsForAdmin(
            @RequestBody TableRequest request) {
        return ResponseEntity.ok(service.getNCBIExportsForAdmin(request));
    }
}
| src/main/java/ca/corefacility/bioinformatics/irida/ria/web/NCBIAjaxController.java | package ca.corefacility.bioinformatics.irida.ria.web;
import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.*;

import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.NcbiExportSubmissionAdminTableModel;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.NcbiExportSubmissionTableModel;
import ca.corefacility.bioinformatics.irida.ria.web.models.export.NcbiSubmissionModel;
import ca.corefacility.bioinformatics.irida.ria.web.models.tables.TableRequest;
import ca.corefacility.bioinformatics.irida.ria.web.models.tables.TableResponse;
import ca.corefacility.bioinformatics.irida.ria.web.services.UINcbiService;

/**
 * Spring Ajax Controller to handle NCBI requests.
 */
@RestController
@RequestMapping("/ajax/ncbi")
public class NCBIAjaxController {
    // Service layer that performs the actual NCBI export look-ups.
    private final UINcbiService service;

    @Autowired
    public NCBIAjaxController(UINcbiService service) {
        this.service = service;
    }

    /**
     * Get a {@link List} of all NCBI Export Submissions on a Project
     *
     * @param projectId Identifier for a Project
     * @return {@link List} of {@link NcbiExportSubmissionTableModel}
     */
    @RequestMapping("/project/{projectId}/list")
    public ResponseEntity<List<NcbiExportSubmissionTableModel>> getNCBIExportsForProject(@PathVariable Long projectId) {
        return ResponseEntity.ok(service.getNCBIExportsForProject(projectId));
    }

    /**
     * Get the details of a specific NCBI SRA submission.
     *
     * @param projectId Identifier for the current project (kept in the URL for
     *                  consistency; the lookup itself only uses {@code exportId})
     * @param exportId  Identifier for the NCBI SRA Submission
     * @return details about the submission
     */
    @GetMapping("/project/{projectId}/details/{exportId}")
    public ResponseEntity<NcbiSubmissionModel> getExportDetails(@PathVariable Long projectId,
            @PathVariable Long exportId) {
        return ResponseEntity.ok(service.getExportDetails(exportId));
    }

    /**
     * Get a paged list of NCBI Export Submission based on the current page information.
     * Restricted to administrators via {@code @PreAuthorize}.
     *
     * @param request {@link TableRequest} containing details about the current page
     * @return {@link TableResponse} of NCBI Export Submissions
     */
    @RequestMapping("/list")
    @PreAuthorize("hasRole('ROLE_ADMIN')")
    public ResponseEntity<TableResponse<NcbiExportSubmissionAdminTableModel>> getNCBIExportsForAdmin(
            @RequestBody TableRequest request) {
        return ResponseEntity.ok(service.getNCBIExportsForAdmin(request));
    }
}
| Updated JavaDoc
| src/main/java/ca/corefacility/bioinformatics/irida/ria/web/NCBIAjaxController.java | Updated JavaDoc |
|
Java | apache-2.0 | dad7ea8a89f70ac3add1f07cdf2816c76f249402 | 0 | Praveen2112/presto,smartnews/presto,Praveen2112/presto,Praveen2112/presto,ebyhr/presto,ebyhr/presto,Praveen2112/presto,ebyhr/presto,smartnews/presto,ebyhr/presto,smartnews/presto,smartnews/presto,ebyhr/presto,smartnews/presto,Praveen2112/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.testing.datatype;

import io.trino.Session;
import io.trino.spi.type.Type;
import io.trino.sql.query.QueryAssertions;
import io.trino.sql.query.QueryAssertions.QueryAssert;
import io.trino.testing.MaterializedResult;
import io.trino.testing.QueryRunner;
import io.trino.testing.sql.TestTable;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.IntStream;

import static com.google.common.base.Preconditions.checkState;
import static java.lang.String.format;
import static java.util.Collections.unmodifiableList;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Fluent harness for round-trip testing of SQL data types. Each
 * {@code addRoundTrip} call registers one column: the literal (and optionally
 * the declared type) written into the test table, plus the literal (and
 * optionally the Trino type) expected when reading it back. {@link #execute}
 * materializes all registered cases as columns of a single table, then
 * verifies both plain {@code SELECT} results and null-safe equality
 * predicates against that table.
 */
public final class SqlDataTypeTest
{
    public static SqlDataTypeTest create()
    {
        return new SqlDataTypeTest();
    }

    // One entry per registered column, in registration order.
    private final List<TestCase> testCases = new ArrayList<>();

    private SqlDataTypeTest() {}

    /**
     * Registers a case whose input literal is also the expected result;
     * no type is declared or verified.
     */
    public SqlDataTypeTest addRoundTrip(String literal)
    {
        return addRoundTrip(literal, literal);
    }

    /**
     * Registers a case with distinct input and expected literals;
     * no type is declared or verified.
     */
    public SqlDataTypeTest addRoundTrip(String inputLiteral, String expectedLiteral)
    {
        testCases.add(new TestCase(Optional.empty(), inputLiteral, Optional.empty(), expectedLiteral));
        return this;
    }

    /**
     * Registers a case with an explicit column type; the literal is expected
     * back unchanged, with the given expected Trino type.
     */
    public SqlDataTypeTest addRoundTrip(String inputType, String literal, Type expectedType)
    {
        return addRoundTrip(inputType, literal, expectedType, literal);
    }

    /**
     * Registers a fully-specified case: declared column type, input literal,
     * expected Trino type and expected literal.
     */
    public SqlDataTypeTest addRoundTrip(String inputType, String inputLiteral, Type expectedType, String expectedLiteral)
    {
        testCases.add(new TestCase(Optional.of(inputType), inputLiteral, Optional.of(expectedType), expectedLiteral));
        return this;
    }

    /**
     * Runs all registered cases using the query runner's default session.
     */
    public SqlDataTypeTest execute(QueryRunner queryRunner, DataSetup dataSetup)
    {
        return execute(queryRunner, queryRunner.getDefaultSession(), dataSetup);
    }

    /**
     * Sets up the test table (dropped again by try-with-resources), then
     * verifies the SELECT results and predicate behavior of every case.
     */
    public SqlDataTypeTest execute(QueryRunner queryRunner, Session session, DataSetup dataSetup)
    {
        checkState(!testCases.isEmpty(), "No test cases");
        try (TestTable testTable = dataSetup.setupTestTable(unmodifiableList(testCases))) {
            verifySelect(queryRunner, session, testTable);
            verifyPredicate(queryRunner, session, testTable);
        }
        return this;
    }

    // Compares SELECT * against a single expected row built by evaluating the
    // expected literals through the engine itself, and checks declared types.
    private void verifySelect(QueryRunner queryRunner, Session session, TestTable testTable)
    {
        @SuppressWarnings("resource") // Closing QueryAssertions would close the QueryRunner
        QueryAssertions queryAssertions = new QueryAssertions(queryRunner);
        QueryAssert assertion = assertThat(queryAssertions.query(session, "SELECT * FROM " + testTable.getName()));
        // VALUES ROW(...) builds one row containing all expected literals as columns.
        MaterializedResult expected = queryRunner.execute(session, testCases.stream()
                .map(TestCase::getExpectedLiteral)
                .collect(joining(",", "VALUES ROW(", ")")));
        // Verify types if specified
        for (int column = 0; column < testCases.size(); column++) {
            TestCase testCase = testCases.get(column);
            if (testCase.getExpectedType().isPresent()) {
                Type expectedType = testCase.getExpectedType().get();
                assertion.outputHasType(column, expectedType);
                assertThat(expected.getTypes())
                        .as(format("Expected literal type at column %d (check consistency of expected type and expected literal)", column + 1))
                        .element(column).isEqualTo(expectedType);
            }
        }
        assertion.matches(expected);
    }

    // Fast path: one conjunctive query over all columns. Only when that
    // mismatches, re-check column by column so the failure pinpoints the
    // offending test case.
    private void verifyPredicate(QueryRunner queryRunner, Session session, TestTable testTable)
    {
        String queryWithAll = "SELECT 'all found' FROM " + testTable.getName() + " WHERE " +
                IntStream.range(0, testCases.size())
                        .mapToObj(this::getPredicate)
                        .collect(joining(" AND "));
        MaterializedResult result = queryRunner.execute(session, queryWithAll);
        if (result.getOnlyColumnAsSet().equals(Set.of("all found"))) {
            return;
        }

        @SuppressWarnings("resource") // Closing QueryAssertions would close the QueryRunner
        QueryAssertions queryAssertions = new QueryAssertions(queryRunner);
        for (int column = 0; column < testCases.size(); column++) {
            assertThat(queryAssertions.query(session, "SELECT 'found' FROM " + testTable.getName() + " WHERE " + getPredicate(column)))
                    .matches("VALUES 'found'");
        }
    }

    // IS NOT DISTINCT FROM gives null-safe equality against the expected literal.
    private String getPredicate(int column)
    {
        return format("col_%s IS NOT DISTINCT FROM %s", column, testCases.get(column).getExpectedLiteral());
    }

    // Immutable holder for one registered column; also feeds the table setup
    // through the ColumnSetup interface.
    private static class TestCase
            implements ColumnSetup
    {
        private final Optional<String> declaredType;
        private final String inputLiteral;
        private final Optional<Type> expectedType;
        private final String expectedLiteral;

        public TestCase(Optional<String> declaredType, String inputLiteral, Optional<Type> expectedType, String expectedLiteral)
        {
            this.declaredType = requireNonNull(declaredType, "declaredType is null");
            this.expectedType = requireNonNull(expectedType, "expectedType is null");
            this.inputLiteral = requireNonNull(inputLiteral, "inputLiteral is null");
            this.expectedLiteral = requireNonNull(expectedLiteral, "expectedLiteral is null");
        }

        @Override
        public Optional<String> getDeclaredType()
        {
            return declaredType;
        }

        @Override
        public String getInputLiteral()
        {
            return inputLiteral;
        }

        public Optional<Type> getExpectedType()
        {
            return expectedType;
        }

        public String getExpectedLiteral()
        {
            return expectedLiteral;
        }
    }
}
| testing/trino-testing/src/main/java/io/trino/testing/datatype/SqlDataTypeTest.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.testing.datatype;

import io.trino.Session;
import io.trino.spi.type.Type;
import io.trino.sql.query.QueryAssertions;
import io.trino.sql.query.QueryAssertions.QueryAssert;
import io.trino.testing.MaterializedResult;
import io.trino.testing.QueryRunner;
import io.trino.testing.sql.TestTable;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.IntStream;

import static com.google.common.base.Preconditions.checkState;
import static java.lang.String.format;
import static java.util.Collections.unmodifiableList;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Fluent harness for round-trip testing of SQL data types. Each
 * {@code addRoundTrip} call registers one column (input type/literal plus
 * expected type/literal); {@link #execute} materializes all registered cases
 * as columns of a single test table and verifies both plain {@code SELECT}
 * results and null-safe equality predicates against it.
 */
public final class SqlDataTypeTest
{
    public static SqlDataTypeTest create()
    {
        return new SqlDataTypeTest();
    }

    // One entry per registered column, in registration order.
    private final List<TestCase> testCases = new ArrayList<>();

    private SqlDataTypeTest() {}

    // Registers a case whose input literal is also the expected result; no type checked.
    public SqlDataTypeTest addRoundTrip(String literal)
    {
        return addRoundTrip(literal, literal);
    }

    // Registers a case with distinct input and expected literals; no type checked.
    public SqlDataTypeTest addRoundTrip(String inputLiteral, String expectedLiteral)
    {
        testCases.add(new TestCase(Optional.empty(), inputLiteral, Optional.empty(), expectedLiteral));
        return this;
    }

    // Registers a case with an explicit column type; literal expected back unchanged.
    public SqlDataTypeTest addRoundTrip(String inputType, String literal, Type expectedType)
    {
        return addRoundTrip(inputType, literal, expectedType, literal);
    }

    // Registers a fully-specified case: declared type, input, expected type, expected literal.
    public SqlDataTypeTest addRoundTrip(String inputType, String inputLiteral, Type expectedType, String expectedLiteral)
    {
        testCases.add(new TestCase(Optional.of(inputType), inputLiteral, Optional.of(expectedType), expectedLiteral));
        return this;
    }

    // Runs all registered cases using the query runner's default session.
    public SqlDataTypeTest execute(QueryRunner queryRunner, DataSetup dataSetup)
    {
        return execute(queryRunner, queryRunner.getDefaultSession(), dataSetup);
    }

    // Sets up the test table (dropped by try-with-resources), then verifies
    // SELECT results and predicate behavior of every case.
    public SqlDataTypeTest execute(QueryRunner queryRunner, Session session, DataSetup dataSetup)
    {
        checkState(!testCases.isEmpty(), "No test cases");
        try (TestTable testTable = dataSetup.setupTestTable(unmodifiableList(testCases))) {
            verifySelect(queryRunner, session, testTable);
            verifyPredicate(queryRunner, session, testTable);
        }
        return this;
    }

    // Compares SELECT * against one row built by evaluating the expected
    // literals through the engine itself, and checks declared expected types.
    // NOTE(review): with a bare "VALUES (...)" a single-element case is parsed
    // as a one-column row expression; an explicit "VALUES ROW(...)" would make
    // the intended single multi-column row unambiguous — verify.
    private void verifySelect(QueryRunner queryRunner, Session session, TestTable testTable)
    {
        @SuppressWarnings("resource") // Closing QueryAssertions would close the QueryRunner
        QueryAssertions queryAssertions = new QueryAssertions(queryRunner);
        QueryAssert assertion = assertThat(queryAssertions.query(session, "SELECT * FROM " + testTable.getName()));
        MaterializedResult expected = queryRunner.execute(session, testCases.stream()
                .map(TestCase::getExpectedLiteral)
                .collect(joining(",", "VALUES (", ")")));
        // Verify types if specified
        for (int column = 0; column < testCases.size(); column++) {
            TestCase testCase = testCases.get(column);
            if (testCase.getExpectedType().isPresent()) {
                Type expectedType = testCase.getExpectedType().get();
                assertion.outputHasType(column, expectedType);
                assertThat(expected.getTypes())
                        .as(format("Expected literal type at column %d (check consistency of expected type and expected literal)", column + 1))
                        .element(column).isEqualTo(expectedType);
            }
        }
        assertion.matches(expected);
    }

    // Fast path: one conjunctive query over all columns; only on mismatch
    // re-check column by column so the failure pinpoints the offending case.
    private void verifyPredicate(QueryRunner queryRunner, Session session, TestTable testTable)
    {
        String queryWithAll = "SELECT 'all found' FROM " + testTable.getName() + " WHERE " +
                IntStream.range(0, testCases.size())
                        .mapToObj(this::getPredicate)
                        .collect(joining(" AND "));
        MaterializedResult result = queryRunner.execute(session, queryWithAll);
        if (result.getOnlyColumnAsSet().equals(Set.of("all found"))) {
            return;
        }

        @SuppressWarnings("resource") // Closing QueryAssertions would close the QueryRunner
        QueryAssertions queryAssertions = new QueryAssertions(queryRunner);
        for (int column = 0; column < testCases.size(); column++) {
            assertThat(queryAssertions.query(session, "SELECT 'found' FROM " + testTable.getName() + " WHERE " + getPredicate(column)))
                    .matches("VALUES 'found'");
        }
    }

    // IS NOT DISTINCT FROM gives null-safe equality against the expected literal.
    private String getPredicate(int column)
    {
        return format("col_%s IS NOT DISTINCT FROM %s", column, testCases.get(column).getExpectedLiteral());
    }

    // Immutable holder for one registered column; also feeds the table setup
    // through the ColumnSetup interface.
    private static class TestCase
            implements ColumnSetup
    {
        private final Optional<String> declaredType;
        private final String inputLiteral;
        private final Optional<Type> expectedType;
        private final String expectedLiteral;

        public TestCase(Optional<String> declaredType, String inputLiteral, Optional<Type> expectedType, String expectedLiteral)
        {
            this.declaredType = requireNonNull(declaredType, "declaredType is null");
            this.expectedType = requireNonNull(expectedType, "expectedType is null");
            this.inputLiteral = requireNonNull(inputLiteral, "inputLiteral is null");
            this.expectedLiteral = requireNonNull(expectedLiteral, "expectedLiteral is null");
        }

        @Override
        public Optional<String> getDeclaredType()
        {
            return declaredType;
        }

        @Override
        public String getInputLiteral()
        {
            return inputLiteral;
        }

        public Optional<Type> getExpectedType()
        {
            return expectedType;
        }

        public String getExpectedLiteral()
        {
            return expectedLiteral;
        }
    }
}
| Add outer ROW to VALUES in SqlDataTypeTest.verifySelect
| testing/trino-testing/src/main/java/io/trino/testing/datatype/SqlDataTypeTest.java | Add outer ROW to VALUES in SqlDataTypeTest.verifySelect |
|
Java | apache-2.0 | 3610b7b34f5d5f96cf48d9fc5bf9575cb07aff28 | 0 | nus-ncl/services-in-one,nus-ncl/services-in-one | package sg.ncl.common.jwt;
import io.jsonwebtoken.JwtParser;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import io.jsonwebtoken.SignatureException;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import sg.ncl.common.authentication.AuthenticationProperties;

import javax.crypto.spec.SecretKeySpec;
import javax.inject.Inject;
import javax.validation.constraints.NotNull;
import java.security.Key;
import java.time.Duration;
import java.time.format.DateTimeParseException;
import java.util.UUID;

/**
 * Initializes the necessary components to create JWTs.
 *
 * @author Christopher Zhong
 * @version 1.0
 */
@Configuration
@ConditionalOnClass({SignatureAlgorithm.class, Key.class, Duration.class})
@EnableConfigurationProperties(JwtProperties.class)
@Slf4j
public class JwtAutoConfiguration {

    /** Fallback signing algorithm when none is configured. */
    static final SignatureAlgorithm DEFAULT_SIGNATURE_ALGORITHM = SignatureAlgorithm.HS512;
    /** Fallback signing-key material, freshly randomized on every JVM start. */
    static final UUID DEFAULT_API_KEY = UUID.randomUUID();
    /** Fallback token lifetime when none is configured. */
    static final Duration DEFAULT_EXPIRY_DURATION = Duration.ofHours(24L);

    // Externalized JWT settings (signing algorithm, api key, expiry duration).
    private final JwtProperties properties;

    @Inject
    JwtAutoConfiguration(@NotNull final JwtProperties properties) {
        this.properties = properties;
    }

    /**
     * Servlet filter that handles JWT-based authentication.
     */
    @Bean
    @ConditionalOnMissingBean(JwtFilter.class)
    public JwtFilter jwtFilter(@NotNull AuthenticationProperties authenticationProperties) {
        return new JwtFilter(authenticationProperties);
    }

    /**
     * Parser for verifying incoming JWTs against the configured signing key.
     */
    @Bean
    @ConditionalOnMissingBean(JwtParser.class)
    public JwtParser jwtParser(@NotNull final Key apiKey) {
        return Jwts.parser().setSigningKey(apiKey);
    }

    /**
     * Resolves the signature algorithm from configuration, falling back to
     * {@link #DEFAULT_SIGNATURE_ALGORITHM} when missing or unrecognized.
     */
    @Bean
    @ConditionalOnMissingBean(SignatureAlgorithm.class)
    public SignatureAlgorithm signatureAlgorithm() {
        final String value = properties.getSigningAlgorithm();
        if (value == null || value.isEmpty()) {
            log.warn("No signature algorithm was specified; using default: {}", DEFAULT_SIGNATURE_ALGORITHM);
            return DEFAULT_SIGNATURE_ALGORITHM;
        }
        try {
            final SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.forName(value);
            log.info("Using specified signature algorithm: {}", signatureAlgorithm);
            return signatureAlgorithm;
        } catch (SignatureException e) {
            log.warn("{}; using default: {}", e, DEFAULT_SIGNATURE_ALGORITHM);
            return DEFAULT_SIGNATURE_ALGORITHM;
        }
    }

    /**
     * Builds the JWT signing key from the configured api key, or from a
     * randomly generated default when none is configured.
     * <p>
     * The key material is deliberately never written to the logs: it is the
     * secret used to sign and verify all tokens.
     */
    @Bean
    @ConditionalOnMissingBean(Key.class)
    public Key apiKey(@NotNull final SignatureAlgorithm signatureAlgorithm) {
        final String value = properties.getApiKey();
        if (value == null || value.isEmpty()) {
            // Do not log the generated key either; it is equally a signing secret.
            log.warn("No api key was specified; using a randomly generated default (value not logged)");
            return new SecretKeySpec(DEFAULT_API_KEY.toString().getBytes(), signatureAlgorithm.getJcaName());
        }
        // NOTE(review): getBytes() uses the platform charset; an explicit
        // charset would be safer for non-ASCII keys but changes key bytes —
        // left unchanged for compatibility with existing deployments.
        log.info("Using api key from configuration (value not logged)");
        return new SecretKeySpec(value.getBytes(), signatureAlgorithm.getJcaName());
    }

    /**
     * Parses the configured token expiry (ISO-8601 duration), falling back to
     * {@link #DEFAULT_EXPIRY_DURATION} when missing or malformed.
     */
    @Bean
    @ConditionalOnMissingBean(Duration.class)
    public Duration expiryDuration() {
        final String value = properties.getExpiryDuration();
        if (value == null || value.isEmpty()) {
            log.warn("No expiry duration was specified; using default: {}", DEFAULT_EXPIRY_DURATION);
            return DEFAULT_EXPIRY_DURATION;
        }
        try {
            final Duration duration = Duration.parse(value);
            log.info("Using specified expiry duration: {}", duration);
            return duration;
        } catch (DateTimeParseException e) {
            log.warn("{}: '{}'; using default: {}", e, value, DEFAULT_EXPIRY_DURATION);
            return DEFAULT_EXPIRY_DURATION;
        }
    }
}
| common/src/main/java/sg/ncl/common/jwt/JwtAutoConfiguration.java | package sg.ncl.common.jwt;
import io.jsonwebtoken.JwtParser;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;
import io.jsonwebtoken.SignatureException;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.crypto.spec.SecretKeySpec;
import javax.inject.Inject;
import javax.validation.constraints.NotNull;
import java.security.Key;
import java.time.Duration;
import java.time.format.DateTimeParseException;
import java.util.UUID;

/**
 * Initializes the necessary components to create JWTs.
 *
 * @author Christopher Zhong
 * @version 1.0
 */
@Configuration
@ConditionalOnClass({SignatureAlgorithm.class, Key.class, Duration.class})
@EnableConfigurationProperties(JwtProperties.class)
@Slf4j
public class JwtAutoConfiguration {

    // Fallback signing algorithm when none is configured.
    static final SignatureAlgorithm DEFAULT_SIGNATURE_ALGORITHM = SignatureAlgorithm.HS512;
    // Fallback signing-key material, freshly randomized on every JVM start.
    static final UUID DEFAULT_API_KEY = UUID.randomUUID();
    // Fallback token lifetime when none is configured.
    static final Duration DEFAULT_EXPIRY_DURATION = Duration.ofHours(24L);

    // Externalized JWT settings (signing algorithm, api key, expiry duration).
    private final JwtProperties properties;

    @Inject
    JwtAutoConfiguration(@NotNull final JwtProperties properties) {
        this.properties = properties;
    }

    // Servlet filter that handles JWT-based authentication.
    @Bean
    @ConditionalOnMissingBean(JwtFilter.class)
    public JwtFilter jwtFilter() {
        return new JwtFilter();
    }

    // Parser for verifying incoming JWTs against the configured signing key.
    @Bean
    @ConditionalOnMissingBean(JwtParser.class)
    public JwtParser jwtParser(@NotNull final Key apiKey) {
        return Jwts.parser().setSigningKey(apiKey);
    }

    // Resolves the signature algorithm from configuration, falling back to
    // DEFAULT_SIGNATURE_ALGORITHM when missing or unrecognized.
    @Bean
    @ConditionalOnMissingBean(SignatureAlgorithm.class)
    public SignatureAlgorithm signatureAlgorithm() {
        final String value = properties.getSigningAlgorithm();
        if (value == null || value.isEmpty()) {
            log.warn("No signature algorithm was specified; using default: {}", DEFAULT_SIGNATURE_ALGORITHM);
            return DEFAULT_SIGNATURE_ALGORITHM;
        }
        try {
            final SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.forName(value);
            log.info("Using specified signature algorithm: {}", signatureAlgorithm);
            return signatureAlgorithm;
        } catch (SignatureException e) {
            log.warn("{}; using default: {}", e, DEFAULT_SIGNATURE_ALGORITHM);
            return DEFAULT_SIGNATURE_ALGORITHM;
        }
    }

    // Builds the JWT signing key from the configured api key, or from a
    // randomly generated default when none is configured.
    // NOTE(review): both log statements below write the signing secret (the
    // default key or the configured key) into the logs — consider redacting.
    @Bean
    @ConditionalOnMissingBean(Key.class)
    public Key apiKey(@NotNull final SignatureAlgorithm signatureAlgorithm) {
        final String value = properties.getApiKey();
        if (value == null || value.isEmpty()) {
            log.warn("No api key was specified; using default: {}", DEFAULT_API_KEY);
            return new SecretKeySpec(DEFAULT_API_KEY.toString().getBytes(), signatureAlgorithm.getJcaName());
        }
        log.info("Using specified api key: {}", value);
        return new SecretKeySpec(value.getBytes(), signatureAlgorithm.getJcaName());
    }

    // Parses the configured token expiry (ISO-8601 duration), falling back to
    // DEFAULT_EXPIRY_DURATION when missing or malformed.
    @Bean
    @ConditionalOnMissingBean(Duration.class)
    public Duration expiryDuration() {
        final String value = properties.getExpiryDuration();
        if (value == null || value.isEmpty()) {
            log.warn("No expiry duration was specified; using default: {}", DEFAULT_EXPIRY_DURATION);
            return DEFAULT_EXPIRY_DURATION;
        }
        try {
            final Duration duration = Duration.parse(value);
            log.info("Using specified expiry duration: {}", duration);
            return duration;
        } catch (DateTimeParseException e) {
            log.warn("{}: '{}'; using default: {}", e, value, DEFAULT_EXPIRY_DURATION);
            return DEFAULT_EXPIRY_DURATION;
        }
    }
}
| DEV-791 add authentication properties to jwtautoconfig
| common/src/main/java/sg/ncl/common/jwt/JwtAutoConfiguration.java | DEV-791 add authentication properties to jwtautoconfig |
|
Java | apache-2.0 | d768cd59fed201f86a0b37075210213942d532b9 | 0 | stillalex/jackrabbit-oak,Kast0rTr0y/jackrabbit-oak,code-distillery/jackrabbit-oak,kwin/jackrabbit-oak,chetanmeh/jackrabbit-oak,code-distillery/jackrabbit-oak,tripodsan/jackrabbit-oak,anchela/jackrabbit-oak,leftouterjoin/jackrabbit-oak,Kast0rTr0y/jackrabbit-oak,afilimonov/jackrabbit-oak,yesil/jackrabbit-oak,bdelacretaz/jackrabbit-oak,tripodsan/jackrabbit-oak,mduerig/jackrabbit-oak,chetanmeh/jackrabbit-oak,Kast0rTr0y/jackrabbit-oak,code-distillery/jackrabbit-oak,rombert/jackrabbit-oak,stillalex/jackrabbit-oak,alexkli/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,mduerig/jackrabbit-oak,francescomari/jackrabbit-oak,leftouterjoin/jackrabbit-oak,afilimonov/jackrabbit-oak,alexkli/jackrabbit-oak,catholicon/jackrabbit-oak,meggermo/jackrabbit-oak,mduerig/jackrabbit-oak,davidegiannella/jackrabbit-oak,code-distillery/jackrabbit-oak,afilimonov/jackrabbit-oak,francescomari/jackrabbit-oak,tripodsan/jackrabbit-oak,anchela/jackrabbit-oak,alexparvulescu/jackrabbit-oak,yesil/jackrabbit-oak,alexkli/jackrabbit-oak,stillalex/jackrabbit-oak,anchela/jackrabbit-oak,meggermo/jackrabbit-oak,AndreasAbdi/jackrabbit-oak,yesil/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,kwin/jackrabbit-oak,francescomari/jackrabbit-oak,rombert/jackrabbit-oak,AndreasAbdi/jackrabbit-oak,catholicon/jackrabbit-oak,mduerig/jackrabbit-oak,afilimonov/jackrabbit-oak,yesil/jackrabbit-oak,chetanmeh/jackrabbit-oak,rombert/jackrabbit-oak,davidegiannella/jackrabbit-oak,stillalex/jackrabbit-oak,kwin/jackrabbit-oak,bdelacretaz/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,joansmith/jackrabbit-oak,alexparvulescu/jackrabbit-oak,joansmith/jackrabbit-oak,joansmith/jackrabbit-oak,catholicon/jackrabbit-oak,francescomari/jackrabbit-oak,anchela/jackrabbit-oak,leftouterjoin/jackrabbit-oak,meggermo/jackrabbit-oak,alexparvulescu/jackrabbit-oak,leftouterjoin/jackrabbit-oak,joansmith/jackrabbit-oak,davidegiannella/jackrabbit-oak,chetanmeh/jackrabbit-oak,meggermo/ja
ckrabbit-oak,stillalex/jackrabbit-oak,francescomari/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,bdelacretaz/jackrabbit-oak,ieb/jackrabbit-oak,alexparvulescu/jackrabbit-oak,FlakyTestDetection/jackrabbit-oak,ieb/jackrabbit-oak,code-distillery/jackrabbit-oak,alexparvulescu/jackrabbit-oak,mduerig/jackrabbit-oak,meggermo/jackrabbit-oak,Kast0rTr0y/jackrabbit-oak,ieb/jackrabbit-oak,joansmith/jackrabbit-oak,kwin/jackrabbit-oak,chetanmeh/jackrabbit-oak,anchela/jackrabbit-oak,bdelacretaz/jackrabbit-oak,rombert/jackrabbit-oak,davidegiannella/jackrabbit-oak,AndreasAbdi/jackrabbit-oak,catholicon/jackrabbit-oak,davidegiannella/jackrabbit-oak,AndreasAbdi/jackrabbit-oak,kwin/jackrabbit-oak,tripodsan/jackrabbit-oak,alexkli/jackrabbit-oak,alexkli/jackrabbit-oak,catholicon/jackrabbit-oak,ieb/jackrabbit-oak | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.mongomk;
import java.util.List;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.mk.api.MicroKernelException;
/**
* The interface for the backend storage for documents.
*/
public interface DocumentStore {
/**
* Get a document.
* <p>
* The returned document is immutable.
*
* @param <T> the document type
* @param collection the collection
* @param key the key
* @return the document, or null if not found
*/
@CheckForNull
<T extends Document> T find(Collection<T> collection, String key);
/**
* Get a document, ignoring the cache if the cached entry is older than the
* specified time.
* <p>
* The returned document is immutable.
*
* @param <T> the document type
* @param collection the collection
* @param key the key
* @param maxCacheAge the maximum age of the cached document
* @return the document, or null if not found
*/
@CheckForNull
<T extends Document> T find(Collection<T> collection, String key, int maxCacheAge);
/**
* Get a list of documents where the key is greater than a start value and
* less than an end value, sorted by the key.
* <p>
* The returned documents are immutable.
*
* @param <T> the document type
* @param collection the collection
* @param fromKey the start value (excluding)
* @param toKey the end value (excluding)
* @param limit the maximum number of entries to return (starting with the lowest key)
* @return the list (possibly empty)
*/
@Nonnull
<T extends Document> List<T> query(Collection<T> collection,
String fromKey,
String toKey,
int limit);
/**
* Get a list of documents where the key is greater than a start value and
* less than an end value. The returned documents are immutable.
*
* @param <T> the document type
* @param collection the collection
* @param fromKey the start value (excluding)
* @param toKey the end value (excluding)
* @param indexedProperty the name of the indexed property (optional)
* @param startValue the minimum value of the indexed property
* @param limit the maximum number of entries to return
* @return the list (possibly empty)
*/
@Nonnull
<T extends Document> List<T> query(Collection<T> collection,
String fromKey,
String toKey,
String indexedProperty,
long startValue,
int limit);
/**
* Remove a document.
*
* @param <T> the document type
* @param collection the collection
* @param key the key
*/
<T extends Document> void remove(Collection<T> collection, String key);
/**
* Try to create a list of documents.
*
* @param <T> the document type
* @param collection the collection
* @param updateOps the list of documents to add
* @return true if this worked (if none of the documents already existed)
*/
<T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps);
/**
* Update documents with the given keys. Only existing documents are
* updated.
*
* @param <T> the document type.
* @param collection the collection.
* @param keys the keys of the documents to update.
* @param updateOp the update operation to apply to each of the documents.
*/
<T extends Document> void update(Collection<T> collection,
List<String> keys,
UpdateOp updateOp);
    /**
     * Create or update a document (upsert). For MongoDb, this is using
     * "findAndModify" with the "upsert" flag (insert or update). The returned
     * (old) document is immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param update the update operation
     * @return the old document, or <code>null</code> if it didn't exist before.
     * @throws MicroKernelException if the operation failed.
     */
    @CheckForNull
    <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update)
            throws MicroKernelException;
    /**
     * Performs a conditional update (e.g. using
     * {@link UpdateOp.Operation.Type#CONTAINS_MAP_ENTRY}) and only updates the
     * document if the condition is <code>true</code>. The returned document is
     * immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param update the update operation with the condition
     * @return the old document or <code>null</code> if the condition is not met or
     *         if the document wasn't found
     * @throws MicroKernelException if the operation failed.
     */
    @CheckForNull
    <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update)
            throws MicroKernelException;
    /**
     * Invalidate the entire document cache; subsequent reads presumably go
     * back to the backend store — TODO confirm against the implementations.
     */
    void invalidateCache();
    /**
     * Invalidate the document cache for the given key only.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key of the cached document to evict
     */
    <T extends Document> void invalidateCache(Collection<T> collection, String key);
    /**
     * Dispose this instance and release any underlying resources. The store
     * should not be used after this call.
     */
    void dispose();
    /**
     * Fetches the cached document. If the document is not present in the
     * cache, <code>null</code> is returned (the backend is not consulted).
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @return the cached document if present, otherwise <code>null</code>
     */
    @CheckForNull
    <T extends Document> T getIfCached(Collection<T> collection, String key);
}
| oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/mongomk/DocumentStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.mongomk;
import java.util.List;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.mk.api.MicroKernelException;
/**
 * The interface for the backend storage for documents.
 */
public interface DocumentStore {
    /**
     * Get a document.
     * <p>
     * The returned document is immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @return the document, or <code>null</code> if not found
     */
    @CheckForNull
    <T extends Document> T find(Collection<T> collection, String key);
    /**
     * Get a document, ignoring the cache if the cached entry is older than the
     * specified time.
     * <p>
     * The returned document is immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @param maxCacheAge the maximum age of the cached document
     * @return the document, or <code>null</code> if not found
     */
    @CheckForNull
    <T extends Document> T find(Collection<T> collection, String key, int maxCacheAge);
    /**
     * Get a list of documents where the key is greater than a start value and
     * less than an end value, sorted by the key.
     * <p>
     * The returned documents are immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param fromKey the start value (excluding)
     * @param toKey the end value (excluding)
     * @param limit the maximum number of entries to return (starting with the lowest key)
     * @return the list (possibly empty)
     */
    @Nonnull
    <T extends Document> List<T> query(Collection<T> collection,
                                       String fromKey,
                                       String toKey,
                                       int limit);
    /**
     * Get a list of documents where the key is greater than a start value and
     * less than an end value, optionally filtered on an indexed property. The
     * returned documents are immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param fromKey the start value (excluding)
     * @param toKey the end value (excluding)
     * @param indexedProperty the name of the indexed property (optional)
     * @param startValue the minimum value of the indexed property
     * @param limit the maximum number of entries to return
     * @return the list (possibly empty)
     */
    @Nonnull
    <T extends Document> List<T> query(Collection<T> collection,
                                       String fromKey,
                                       String toKey,
                                       String indexedProperty,
                                       long startValue,
                                       int limit);
    /**
     * Remove a document.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key of the document to remove
     */
    <T extends Document> void remove(Collection<T> collection, String key);
    /**
     * Try to create a list of documents.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param updateOps the list of documents to add
     * @return true if this worked (if none of the documents already existed)
     */
    <T extends Document> boolean create(Collection<T> collection, List<UpdateOp> updateOps);
    /**
     * Update documents with the given keys. Only existing documents are
     * updated; keys without a matching document are skipped.
     *
     * @param <T> the document type.
     * @param collection the collection.
     * @param keys the keys of the documents to update.
     * @param updateOp the update operation to apply to each of the documents.
     */
    <T extends Document> void update(Collection<T> collection,
                                     List<String> keys,
                                     UpdateOp updateOp);
    /**
     * Create or update a document (upsert). For MongoDb, this is using
     * "findAndModify" with the "upsert" flag (insert or update). The returned
     * document is immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param update the update operation
     * @return the old document or <code>null</code> if it didn't exist before.
     * @throws MicroKernelException if the operation failed.
     */
    @CheckForNull
    <T extends Document> T createOrUpdate(Collection<T> collection, UpdateOp update)
            throws MicroKernelException;
    /**
     * Performs a conditional update (e.g. using
     * {@link UpdateOp.Operation.Type#CONTAINS_MAP_ENTRY}) and only updates the
     * document if the condition is <code>true</code>. The returned document is
     * immutable.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param update the update operation with the condition
     * @return the old document or <code>null</code> if the condition is not met or
     *         if the document wasn't found
     * @throws MicroKernelException if the operation failed.
     */
    @CheckForNull
    <T extends Document> T findAndUpdate(Collection<T> collection, UpdateOp update)
            throws MicroKernelException;
    /**
     * Invalidate the entire document cache.
     */
    void invalidateCache();
    /**
     * Invalidate the document cache for the given key only.
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key of the cached document to evict
     */
    <T extends Document> void invalidateCache(Collection<T> collection, String key);
    /**
     * Dispose this instance and release any underlying resources.
     */
    void dispose();
    /**
     * Fetches the cached document. If the document is not present in the
     * cache, <code>null</code> is returned (the backend is not consulted).
     *
     * @param <T> the document type
     * @param collection the collection
     * @param key the key
     * @return the cached document if present, otherwise <code>null</code>
     */
    @CheckForNull
    <T extends Document> T getIfCached(Collection<T> collection, String key);
}
| OAK-98 - clarify DocumentStore.findAndUpdate() for the case when the doc wasn't found
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1550422 13f79535-47bb-0310-9956-ffa450edef68
| oak-core/src/main/java/org/apache/jackrabbit/oak/plugins/mongomk/DocumentStore.java | OAK-98 - clarify DocumentStore.findAndUpdate() for the case when the doc wasn't found |
|
Java | apache-2.0 | 7d3721d726873b288a936344462e2fa81c28c0ea | 0 | profesorfalken/jSensors | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.profesorfalken.jsensors.manager.windows;
import com.profesorfalken.jsensors.manager.SensorsManager;
import com.profesorfalken.jsensors.manager.windows.powershell.PowerShellOperations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@code SensorsManager} implementation for Windows. The raw sensor dump is
 * obtained through PowerShell (see {@code PowerShellOperations}) and
 * normalized into a line-oriented format: a "[COMPONENT]" header plus the
 * component type (CPU/GPU/DISK) and a "Label: ..." line, followed by one
 * "sensorName: value" line per Temperature/Fan sensor.
 * <p>
 * NOTE(review): the unused {@code sensorsData}/{@code sensorsDebugData}
 * StringBuilder fields from the previous revision were removed — nothing
 * referenced them.
 *
 * @author javier
 */
public class WindowsSensorsManager extends SensorsManager {

    private static final Logger LOGGER = LoggerFactory.getLogger(WindowsSensorsManager.class);

    /** Line separator used in the normalized output (Windows style). */
    private static final String LINE_BREAK = "\r\n";

    /**
     * Queries the raw sensors data via PowerShell and returns it normalized.
     *
     * @return normalized sensors data (possibly empty, never null)
     */
    public String getSensorsData() {
        String rawSensorsData = PowerShellOperations.getRawSensorsData();
        // debugMode is inherited from SensorsManager — presumably a verbose flag.
        if (debugMode) {
            LOGGER.info(rawSensorsData);
        }
        return normalizeSensorsData(rawSensorsData);
    }

    /**
     * Converts the raw PowerShell "key : value" dump into the normalized
     * component/sensor format described in the class javadoc.
     */
    private static String normalizeSensorsData(String rawSensorsData) {
        StringBuilder normalizedSensorsData = new StringBuilder();
        String[] dataLines = rawSensorsData.split("\\r?\\n");
        boolean readingHardLabel = false; // between a HardwareType line and its Name line
        boolean readingSensor = false;    // between a SensorType line and its Value line
        for (final String dataLine : dataLines) {
            if (!readingHardLabel && "HardwareType".equals(getKey(dataLine))) {
                String hardwareType = getValue(dataLine);
                if ("CPU".equals(hardwareType)) {
                    normalizedSensorsData.append("[COMPONENT]").append(LINE_BREAK);
                    normalizedSensorsData.append("CPU").append(LINE_BREAK);
                    readingHardLabel = true;
                    continue;
                } else if (hardwareType.toUpperCase().startsWith("GPU")) {
                    // e.g. "GpuNvidia" / "GpuAti" both map to a GPU component.
                    normalizedSensorsData.append("[COMPONENT]").append(LINE_BREAK);
                    normalizedSensorsData.append("GPU").append(LINE_BREAK);
                    readingHardLabel = true;
                    continue;
                } else if ("HDD".equals(hardwareType)) {
                    normalizedSensorsData.append("[COMPONENT]").append(LINE_BREAK);
                    normalizedSensorsData.append("DISK").append(LINE_BREAK);
                    readingHardLabel = true;
                    continue;
                }
            }
            if (readingHardLabel) {
                // Waiting for the component's Name line to emit its label.
                if ("Name".equals(getKey(dataLine))) {
                    normalizedSensorsData.append("Label: ").append(getValue(dataLine)).append(LINE_BREAK);
                    readingHardLabel = false;
                }
            } else {
                if ("SensorType".equals(getKey(dataLine))) {
                    String sensorType = getValue(dataLine);
                    // Only temperature and fan sensors are kept.
                    if ("Temperature".equals(sensorType) || "Fan".equals(sensorType)) {
                        readingSensor = true;
                        continue;
                    }
                }
                if (readingSensor) {
                    if ("Name".equals(getKey(dataLine))) {
                        normalizedSensorsData.append(getValue(dataLine)).append(": ");
                    } else if ("Value".equals(getKey(dataLine))) {
                        normalizedSensorsData.append(getValue(dataLine)).append(LINE_BREAK);
                        readingSensor = false;
                    }
                }
            }
        }
        return normalizedSensorsData.toString();
    }

    /** Returns the part of {@code line} before the first ':', or "" if none. */
    private static String getKey(String line) {
        return getData(line, 0);
    }

    /** Returns the part of {@code line} after the first ':', or "" if none. */
    private static String getValue(String line) {
        return getData(line, 1);
    }

    private static String getData(String line, final int index) {
        // A colon at position 0 (empty key) is deliberately treated as "no data".
        if (line.indexOf(':') > 0) {
            return line.split(":", 2)[index].trim();
        }
        return "";
    }
} | src/main/java/com/profesorfalken/jsensors/manager/windows/WindowsSensorsManager.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.profesorfalken.jsensors.manager.windows;
import com.profesorfalken.jsensors.manager.SensorsManager;
import com.profesorfalken.jsensors.manager.unix.jna.CSensors;
import com.profesorfalken.jsensors.manager.windows.powershell.PowerShellOperations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Windows implementation of SensorsManager. Work in progress: the
 * normalization step below is still a stub.
 *
 * @author javier
 */
public class WindowsSensorsManager extends SensorsManager {
    // NOTE(review): LOGGER is declared but never used in this revision.
    private static final Logger LOGGER = LoggerFactory.getLogger(WindowsSensorsManager.class);
    private static final String LINE_BREAK = "\n";
    // NOTE(review): these buffers are never read or written here — confirm
    // they are still needed before keeping them.
    private final StringBuilder sensorsData = new StringBuilder();
    private final StringBuilder sensorsDebugData = new StringBuilder();
    // Reads the raw sensors dump via PowerShell and (eventually) normalizes it.
    public String getSensorsData() {
        //TODO Use PowerShellOperations
        String rawSensorsData = PowerShellOperations.getRawSensorsData();
        return normalizeSensorsData(rawSensorsData);
    }
    // Stub implementation: dumps the raw data to stdout (debug leftover —
    // should use LOGGER) and returns a placeholder value.
    private static String normalizeSensorsData (String rawSensorsData) {
        System.out.println(rawSensorsData);
        return "NOTHING";
    }
} | Added data normalization in windows implementation | src/main/java/com/profesorfalken/jsensors/manager/windows/WindowsSensorsManager.java | Added data normalization in windows implementation |
|
Java | apache-2.0 | b879b0eed088b3cc42d7346f06f85f048442eb2c | 0 | code4craft/webmagic,code4craft/webmagic,code4craft/webmagic,code4craft/webmagic,code4craft/webmagic,code4craft/webmagic,code4craft/webmagic | package us.codecraft.webmagic.scheduler;
import com.alibaba.fastjson.JSON;
import org.apache.commons.codec.digest.DigestUtils;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.scheduler.component.DuplicateRemover;
/**
 * Use Redis as url scheduler for distributed crawlers.<br>
 * <p>
 * Per task, urls waiting to be crawled live in a Redis list ("queue_" +
 * uuid), duplicate detection uses a Redis set ("set_" + uuid), and requests
 * carrying extras are serialized as JSON into a hash ("item_" + uuid) keyed
 * by the SHA-1 hex digest of the url.
 *
 * @author [email protected] <br>
 * @since 0.2.0
 */
public class RedisScheduler extends DuplicateRemovedScheduler implements MonitorableScheduler, DuplicateRemover {

    protected JedisPool pool;

    /** Key prefix of the per-task list holding urls waiting to be polled. */
    private static final String QUEUE_PREFIX = "queue_";

    /** Key prefix of the per-task set used for duplicate detection. */
    private static final String SET_PREFIX = "set_";

    /** Key prefix of the per-task hash storing serialized requests. */
    private static final String ITEM_PREFIX = "item_";

    public RedisScheduler(String host) {
        this(new JedisPool(new JedisPoolConfig(), host));
    }

    public RedisScheduler(JedisPool pool) {
        this.pool = pool;
        setDuplicateRemover(this);
    }

    @Override
    public void resetDuplicateCheck(Task task) {
        Jedis jedis = pool.getResource();
        try {
            jedis.del(getSetKey(task));
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    public boolean isDuplicate(Request request, Task task) {
        Jedis jedis = pool.getResource();
        try {
            // SADD returns the number of members actually added: 0 means the
            // url was already in the set, i.e. it is a duplicate.
            return jedis.sadd(getSetKey(task), request.getUrl()) == 0;
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    protected void pushWhenNoDuplicate(Request request, Task task) {
        Jedis jedis = pool.getResource();
        try {
            jedis.rpush(getQueueKey(task), request.getUrl());
            if (request.getExtras() != null) {
                String field = DigestUtils.shaHex(request.getUrl());
                String value = JSON.toJSONString(request);
                // Use the getItemKey() helper instead of rebuilding the key
                // inline, so all three key layouts are defined in one place.
                jedis.hset(getItemKey(task), field, value);
            }
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    public synchronized Request poll(Task task) {
        Jedis jedis = pool.getResource();
        try {
            String url = jedis.lpop(getQueueKey(task));
            if (url == null) {
                return null;
            }
            String key = getItemKey(task);
            String field = DigestUtils.shaHex(url);
            // NOTE(review): getBytes()/new String(bytes) use the platform
            // default charset; keys are ASCII so this is benign in practice.
            byte[] bytes = jedis.hget(key.getBytes(), field.getBytes());
            if (bytes != null) {
                // Request with extras was stored as JSON — restore it fully.
                return JSON.parseObject(new String(bytes), Request.class);
            }
            return new Request(url);
        } finally {
            pool.returnResource(jedis);
        }
    }

    protected String getSetKey(Task task) {
        return SET_PREFIX + task.getUUID();
    }

    protected String getQueueKey(Task task) {
        return QUEUE_PREFIX + task.getUUID();
    }

    protected String getItemKey(Task task) {
        return ITEM_PREFIX + task.getUUID();
    }

    @Override
    public int getLeftRequestsCount(Task task) {
        Jedis jedis = pool.getResource();
        try {
            Long size = jedis.llen(getQueueKey(task));
            return size.intValue();
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    public int getTotalRequestsCount(Task task) {
        Jedis jedis = pool.getResource();
        try {
            Long size = jedis.scard(getSetKey(task));
            return size.intValue();
        } finally {
            pool.returnResource(jedis);
        }
    }
}
| webmagic-extension/src/main/java/us/codecraft/webmagic/scheduler/RedisScheduler.java | package us.codecraft.webmagic.scheduler;
import com.alibaba.fastjson.JSON;
import org.apache.commons.codec.digest.DigestUtils;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.scheduler.component.DuplicateRemover;
/**
 * Use Redis as url scheduler for distributed crawlers.<br>
 * <p>
 * Per task, urls waiting to be crawled live in a Redis list ("queue_" +
 * uuid), duplicate detection uses a Redis set ("set_" + uuid), and requests
 * carrying extras are serialized as JSON into a hash ("item_" + uuid) keyed
 * by the SHA-1 hex digest of the url.
 *
 * @author [email protected] <br>
 * @since 0.2.0
 */
public class RedisScheduler extends DuplicateRemovedScheduler implements MonitorableScheduler, DuplicateRemover {

    protected JedisPool pool;

    /** Key prefix of the per-task list holding urls waiting to be polled. */
    private static final String QUEUE_PREFIX = "queue_";

    /** Key prefix of the per-task set used for duplicate detection. */
    private static final String SET_PREFIX = "set_";

    /** Key prefix of the per-task hash storing serialized requests. */
    private static final String ITEM_PREFIX = "item_";

    public RedisScheduler(String host) {
        this(new JedisPool(new JedisPoolConfig(), host));
    }

    public RedisScheduler(JedisPool pool) {
        this.pool = pool;
        setDuplicateRemover(this);
    }

    @Override
    public void resetDuplicateCheck(Task task) {
        Jedis jedis = pool.getResource();
        try {
            jedis.del(getSetKey(task));
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    public boolean isDuplicate(Request request, Task task) {
        Jedis jedis = pool.getResource();
        try {
            // BUG FIX: SADD returns the number of members actually ADDED, so
            // a result of 0 means the url was already present (a duplicate).
            // The previous "> 0" check was inverted: it flagged every
            // first-seen url as a duplicate and let repeated urls through.
            return jedis.sadd(getSetKey(task), request.getUrl()) == 0;
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    protected void pushWhenNoDuplicate(Request request, Task task) {
        Jedis jedis = pool.getResource();
        try {
            jedis.rpush(getQueueKey(task), request.getUrl());
            if (request.getExtras() != null) {
                String field = DigestUtils.shaHex(request.getUrl());
                String value = JSON.toJSONString(request);
                // Use the getItemKey() helper instead of rebuilding the key
                // inline, so all three key layouts are defined in one place.
                jedis.hset(getItemKey(task), field, value);
            }
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    public synchronized Request poll(Task task) {
        Jedis jedis = pool.getResource();
        try {
            String url = jedis.lpop(getQueueKey(task));
            if (url == null) {
                return null;
            }
            String key = getItemKey(task);
            String field = DigestUtils.shaHex(url);
            byte[] bytes = jedis.hget(key.getBytes(), field.getBytes());
            if (bytes != null) {
                // Request with extras was stored as JSON — restore it fully.
                return JSON.parseObject(new String(bytes), Request.class);
            }
            return new Request(url);
        } finally {
            pool.returnResource(jedis);
        }
    }

    protected String getSetKey(Task task) {
        return SET_PREFIX + task.getUUID();
    }

    protected String getQueueKey(Task task) {
        return QUEUE_PREFIX + task.getUUID();
    }

    protected String getItemKey(Task task) {
        return ITEM_PREFIX + task.getUUID();
    }

    @Override
    public int getLeftRequestsCount(Task task) {
        Jedis jedis = pool.getResource();
        try {
            Long size = jedis.llen(getQueueKey(task));
            return size.intValue();
        } finally {
            pool.returnResource(jedis);
        }
    }

    @Override
    public int getTotalRequestsCount(Task task) {
        Jedis jedis = pool.getResource();
        try {
            Long size = jedis.scard(getSetKey(task));
            return size.intValue();
        } finally {
            pool.returnResource(jedis);
        }
    }
}
| fix redisscheduler #583
| webmagic-extension/src/main/java/us/codecraft/webmagic/scheduler/RedisScheduler.java | fix redisscheduler #583 |
|
Java | apache-2.0 | bf83dbc4a279940561e3afa70b4b7e7f2a84811b | 0 | ChatSecure/ChatSecureAndroid,31H0B1eV/ChatSecureAndroid,bonashen/ChatSecureAndroid,maheshwarishivam/ChatSecureAndroid,10045125/ChatSecureAndroid,eighthave/ChatSecureAndroid,Heart2009/ChatSecureAndroid,OnlyInAmerica/ChatSecureAndroid,n8fr8/ChatSecureAndroid,bonashen/ChatSecureAndroid,h2ri/ChatSecureAndroid,10045125/ChatSecureAndroid,ChatSecure/ChatSecureAndroid,joskarthic/chatsecure,joskarthic/chatsecure,maheshwarishivam/ChatSecureAndroid,prembasumatary/ChatSecureAndroid,n8fr8/AwesomeApp,kden/ChatSecureAndroid,n8fr8/ChatSecureAndroid,guardianproject/ChatSecureAndroid,guardianproject/ChatSecureAndroid,prembasumatary/ChatSecureAndroid,eighthave/ChatSecureAndroid,10045125/ChatSecureAndroid,31H0B1eV/ChatSecureAndroid,n8fr8/AwesomeApp,anvayarai/my-ChatSecure,31H0B1eV/ChatSecureAndroid,ChatSecure/ChatSecureAndroid,bonashen/ChatSecureAndroid,anvayarai/my-ChatSecure,joskarthic/chatsecure,kden/ChatSecureAndroid,Heart2009/ChatSecureAndroid,Heart2009/ChatSecureAndroid,OnlyInAmerica/ChatSecureAndroid,maheshwarishivam/ChatSecureAndroid,anvayarai/my-ChatSecure,n8fr8/AwesomeApp,guardianproject/ChatSecureAndroid,OnlyInAmerica/ChatSecureAndroid,eighthave/ChatSecureAndroid,h2ri/ChatSecureAndroid,prembasumatary/ChatSecureAndroid,kden/ChatSecureAndroid,h2ri/ChatSecureAndroid,OnlyInAmerica/ChatSecureAndroid,ChatSecure/ChatSecureAndroid,n8fr8/ChatSecureAndroid | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.guardianproject.otr.app.im.app;
import java.util.ArrayList;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.PagerAdapter;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
/**
* Implementation of {@link android.support.v4.view.PagerAdapter} that
* uses a {@link Fragment} to manage each page. This class also handles
* saving and restoring of fragment's state.
*
* <p>This version of the pager is more useful when there are a large number
* of pages, working more like a list view. When pages are not visible to
* the user, their entire fragment may be destroyed, only keeping the saved
* state of that fragment. This allows the pager to hold on to much less
* memory associated with each visited page as compared to
* {@link FragmentPagerAdapter} at the cost of potentially more overhead when
* switching between pages.
*
* <p>When using FragmentPagerAdapter the host ViewPager must have a
* valid ID set.</p>
*
* <p>Subclasses only need to implement {@link #getItem(int)}
* and {@link #getCount()} to have a working adapter.
*
* <p>Here is an example implementation of a pager containing fragments of
* lists:
*
* {@sample development/samples/Support13Demos/src/com/example/android/supportv13/app/FragmentStatePagerSupport.java
* complete}
*
* <p>The <code>R.layout.fragment_pager</code> resource of the top-level fragment is:
*
* {@sample development/samples/Support13Demos/res/layout/fragment_pager.xml
* complete}
*
* <p>The <code>R.layout.fragment_pager_list</code> resource containing each
* individual fragment's layout is:
*
* {@sample development/samples/Support13Demos/res/layout/fragment_pager_list.xml
* complete}
*/
public abstract class DynamicPagerAdapter extends PagerAdapter {
    private static final String TAG = "FragmentStatePagerAdapter";
    private static final boolean DEBUG = false;
    // Manager used for all fragment add/remove/save-state transactions.
    private final FragmentManager mFragmentManager;
    // Lazily-created transaction; committed and cleared in finishUpdate().
    private FragmentTransaction mCurTransaction = null;
    // Per-position saved fragment state; kept index-aligned with mFragments.
    private ArrayList<Fragment.SavedState> mSavedState = new ArrayList<Fragment.SavedState>();
    // Per-position live fragments; null where a fragment has been destroyed.
    private ArrayList<Fragment> mFragments = new ArrayList<Fragment>();
    // The fragment currently presented to the user (menu + visible hint set).
    private Fragment mCurrentPrimaryItem = null;
    /**
     * @param fm the fragment manager that will own the page fragments
     */
    public DynamicPagerAdapter(FragmentManager fm) {
        mFragmentManager = fm;
    }
    /**
     * Return the Fragment associated with a specified position.
     * Callers in this class assume a non-null result.
     */
    public abstract Fragment getItem(int position);
    /**
     * Returns the currently instantiated fragment at {@code position}, or
     * {@code null} if there is none (destroyed or never created).
     */
    public Fragment getItemAt(int position) {
        if (position >= mFragments.size())
            return null;
        return mFragments.get(position);
    }
    @Override
    public void startUpdate(ViewGroup container) {
        // No-op: the transaction is created lazily by instantiateItem/destroyItem.
    }
    /**
     * Rebuilds the position-aligned fragment and saved-state lists after the
     * data set changes, using {@link #getItemPosition} to find each live
     * fragment's new position.
     */
    @Override
    public void notifyDataSetChanged() {
        // Fragments may have moved.  Regenerate the fragment list.
        ArrayList<Fragment> old = mFragments;
        ArrayList<Fragment.SavedState> oldState = mSavedState;
        mFragments = new ArrayList<Fragment>();
        mSavedState = new ArrayList<Fragment.SavedState>();
        for (int i = 0 ; i < old.size() ; i++) {
            Fragment frag = old.get(i);
            if (frag != null) {
                // Ask the subclass where this fragment lives now.
                int position = getItemPosition(frag);
                if (position == POSITION_NONE)
                    continue;
                if (position == POSITION_UNCHANGED)
                    position = i;
                while (mFragments.size() <= position) {
                    mFragments.add(null);
                    mSavedState.add(null);
                }
                mFragments.set(position, frag);
                if (oldState.size() > i)
                    mSavedState.set(position, oldState.get(i));
            }
        }
        // The list must never shrink because other methods depend on it
        while (mFragments.size() < old.size()) {
            mFragments.add(null);
            mSavedState.add(null);
        }
        super.notifyDataSetChanged();
    }
    /**
     * Creates (or returns the already-instantiated) fragment for the given
     * position, restores any previously saved state, and schedules it for
     * addition to the container via the pending transaction.
     */
    @Override
    public Object instantiateItem(ViewGroup container, int position) {
        // If we already have this item instantiated, there is nothing
        // to do.  This can happen when we are restoring the entire pager
        // from its saved state, where the fragment manager has already
        // taken care of restoring the fragments we previously had instantiated.
        if (mFragments.size() > position) {
            Fragment f = mFragments.get(position);
            if (f != null) {
                return f;
            }
        }
        if (mCurTransaction == null) {
            mCurTransaction = mFragmentManager.beginTransaction();
        }
        Fragment fragment = getItem(position);
        if (DEBUG) Log.v(TAG, "Adding item #" + position + ": f=" + fragment);
        if (mSavedState.size() > position) {
            Fragment.SavedState fss = mSavedState.get(position);
            if (fss != null) {
                fragment.setInitialSavedState(fss);
            }
        }
        while (mFragments.size() <= position) {
            mFragments.add(null);
        }
        // New fragments start hidden; setPrimaryItem() flips these on.
        fragment.setMenuVisibility(false);
        fragment.setUserVisibleHint(false);
        mFragments.set(position, fragment);
        mCurTransaction.add(container.getId(), fragment);
        return fragment;
    }
    /**
     * Saves the fragment's state, clears its slot, and schedules it for
     * removal via the pending transaction.
     */
    @Override
    public void destroyItem(ViewGroup container, int _position, Object object) {
        Fragment fragment = (Fragment)object;
        // The supplied position is unreliable. When an item is deleted, the pre-reorg position is supplied,
        // but when an item is scrolled off screen due to an insert, the post-reorg position is supplied.
        // Find the item ourselves.
        int position = mFragments.indexOf(fragment);
        if (mCurTransaction == null) {
            mCurTransaction = mFragmentManager.beginTransaction();
        }
        // Fragment might already have been reorged out of the list
        if (position >= 0)
        {
            if (DEBUG) Log.v(TAG, "Removing item #" + position + ": f=" + object
                    + " v=" + ((Fragment)object).getView());
            while (mSavedState.size() <= position) {
                mSavedState.add(null);
            }
            mSavedState.set(position, mFragmentManager.saveFragmentInstanceState(fragment));
            mFragments.set(position, null);
        }
        // TODO do we need to unset the visible hint, etc.?
        if (mCurrentPrimaryItem == fragment)
            mCurrentPrimaryItem = null;
        mCurTransaction.remove(fragment);
    }
    /**
     * Tracks which fragment is the one the user is looking at, toggling
     * menu visibility and the user-visible hint accordingly.
     */
    @Override
    public void setPrimaryItem(ViewGroup container, int position, Object object) {
        Fragment fragment = (Fragment)object;
        if (fragment != mCurrentPrimaryItem) {
            if (mCurrentPrimaryItem != null) {
                mCurrentPrimaryItem.setMenuVisibility(false);
                mCurrentPrimaryItem.setUserVisibleHint(false);
            }
            mCurrentPrimaryItem = fragment;
            if (fragment != null) {
                fragment.setMenuVisibility(true);
                fragment.setUserVisibleHint(true);
            }
        }
    }
    /**
     * Commits the pending transaction accumulated by instantiate/destroy.
     */
    @Override
    public void finishUpdate(ViewGroup container) {
        if (mCurTransaction != null) {
            mCurTransaction.commitAllowingStateLoss();
            mCurTransaction = null;
            mFragmentManager.executePendingTransactions();
        }
    }
    @Override
    public boolean isViewFromObject(View view, Object object) {
        return ((Fragment)object).getView() == view;
    }
    /**
     * Saves live fragment references under "f&lt;index&gt;" keys; per-fragment
     * saved state is intentionally not persisted (see FIXME below).
     */
    @Override
    public Parcelable saveState() {
        Bundle state = null;
        // FIXME don't save internal fragment state for now, until we figure out classloader issue
//        if (mSavedState.size() > 0) {
//            state = new Bundle();
//            Fragment.SavedState[] fss = new Fragment.SavedState[mSavedState.size()];
//            mSavedState.toArray(fss);
//            state.putParcelableArray("states", fss);
//        }
        for (int i=0; i<mFragments.size(); i++) {
            Fragment f = mFragments.get(i);
            if (f != null) {
                if (state == null) {
                    state = new Bundle();
                }
                String key = "f" + i;
                mFragmentManager.putFragment(state, key, f);
            }
        }
        return state;
    }
    /**
     * Restores the saved-state array (when present) and re-attaches the
     * fragments stored under "f&lt;index&gt;" keys by saveState().
     */
    @Override
    public void restoreState(Parcelable state, ClassLoader loader) {
        if (state != null) {
            Bundle bundle = (Bundle)state;
            bundle.setClassLoader(loader);
            Parcelable[] fss = bundle.getParcelableArray("states");
            mSavedState.clear();
            mFragments.clear();
            if (fss != null) {
                for (int i=0; i<fss.length; i++) {
                    mSavedState.add((Fragment.SavedState)fss[i]);
                }
            }
            Iterable<String> keys = bundle.keySet();
            for (String key: keys) {
                if (key.startsWith("f")) {
                    int index = Integer.parseInt(key.substring(1));
                    Fragment f = mFragmentManager.getFragment(bundle, key);
                    if (f != null) {
                        while (mFragments.size() <= index) {
                            mFragments.add(null);
                        }
                        f.setMenuVisibility(false);
                        mFragments.set(index, f);
                    } else {
                        Log.w(TAG, "Bad fragment at key " + key);
                    }
                }
            }
        }
    }
}
| src/info/guardianproject/otr/app/im/app/DynamicPagerAdapter.java | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.guardianproject.otr.app.im.app;
import java.util.ArrayList;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.view.PagerAdapter;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
/**
* Implementation of {@link android.support.v4.view.PagerAdapter} that
* uses a {@link Fragment} to manage each page. This class also handles
* saving and restoring of fragment's state.
*
* <p>This version of the pager is more useful when there are a large number
* of pages, working more like a list view. When pages are not visible to
* the user, their entire fragment may be destroyed, only keeping the saved
* state of that fragment. This allows the pager to hold on to much less
* memory associated with each visited page as compared to
* {@link FragmentPagerAdapter} at the cost of potentially more overhead when
* switching between pages.
*
* <p>When using FragmentPagerAdapter the host ViewPager must have a
* valid ID set.</p>
*
* <p>Subclasses only need to implement {@link #getItem(int)}
* and {@link #getCount()} to have a working adapter.
*
* <p>Here is an example implementation of a pager containing fragments of
* lists:
*
* {@sample development/samples/Support13Demos/src/com/example/android/supportv13/app/FragmentStatePagerSupport.java
* complete}
*
* <p>The <code>R.layout.fragment_pager</code> resource of the top-level fragment is:
*
* {@sample development/samples/Support13Demos/res/layout/fragment_pager.xml
* complete}
*
* <p>The <code>R.layout.fragment_pager_list</code> resource containing each
* individual fragment's layout is:
*
* {@sample development/samples/Support13Demos/res/layout/fragment_pager_list.xml
* complete}
*/
public abstract class DynamicPagerAdapter extends PagerAdapter {
private static final String TAG = "FragmentStatePagerAdapter";
private static final boolean DEBUG = false;
private final FragmentManager mFragmentManager;
private FragmentTransaction mCurTransaction = null;
private ArrayList<Fragment.SavedState> mSavedState = new ArrayList<Fragment.SavedState>();
private ArrayList<Fragment> mFragments = new ArrayList<Fragment>();
private Fragment mCurrentPrimaryItem = null;
    /**
     * @param fm the fragment manager that will own the page fragments
     */
    public DynamicPagerAdapter(FragmentManager fm) {
        mFragmentManager = fm;
    }
    /**
     * Return the Fragment associated with a specified position.
     * Callers in this class assume a non-null result.
     *
     * @param position the adapter position
     */
    public abstract Fragment getItem(int position);
public Fragment getItemAt(int position) {
if (position >= mFragments.size())
return null;
return mFragments.get(position);
}
    @Override
    public void startUpdate(ViewGroup container) {
        // No-op: the transaction is created lazily by instantiateItem/destroyItem.
    }
    /**
     * Rebuilds the position-aligned fragment and saved-state lists after the
     * data set changes, using {@link #getItemPosition} to find each live
     * fragment's new position.
     */
    @Override
    public void notifyDataSetChanged() {
        // Fragments may have moved.  Regenerate the fragment list.
        ArrayList<Fragment> old = mFragments;
        ArrayList<Fragment.SavedState> oldState = mSavedState;
        mFragments = new ArrayList<Fragment>();
        mSavedState = new ArrayList<Fragment.SavedState>();
        for (int i = 0 ; i < old.size() ; i++) {
            Fragment frag = old.get(i);
            if (frag != null) {
                // Ask the subclass where this fragment lives now.
                int position = getItemPosition(frag);
                if (position == POSITION_NONE)
                    continue;
                if (position == POSITION_UNCHANGED)
                    position = i;
                while (mFragments.size() <= position) {
                    mFragments.add(null);
                    mSavedState.add(null);
                }
                mFragments.set(position, frag);
                if (oldState.size() > i)
                    mSavedState.set(position, oldState.get(i));
            }
        }
        // The list must never shrink because other methods depend on it
        while (mFragments.size() < old.size()) {
            mFragments.add(null);
            mSavedState.add(null);
        }
        super.notifyDataSetChanged();
    }
@Override
public Object instantiateItem(ViewGroup container, int position) {
// If we already have this item instantiated, there is nothing
// to do. This can happen when we are restoring the entire pager
// from its saved state, where the fragment manager has already
// taken care of restoring the fragments we previously had instantiated.
if (mFragments.size() > position) {
Fragment f = mFragments.get(position);
if (f != null) {
return f;
}
}
if (mCurTransaction == null) {
mCurTransaction = mFragmentManager.beginTransaction();
}
Fragment fragment = getItem(position);
if (DEBUG) Log.v(TAG, "Adding item #" + position + ": f=" + fragment);
if (mSavedState.size() > position) {
Fragment.SavedState fss = mSavedState.get(position);
if (fss != null) {
fragment.setInitialSavedState(fss);
}
}
while (mFragments.size() <= position) {
mFragments.add(null);
}
fragment.setMenuVisibility(false);
fragment.setUserVisibleHint(false);
mFragments.set(position, fragment);
mCurTransaction.add(container.getId(), fragment);
return fragment;
}
@Override
public void destroyItem(ViewGroup container, int position, Object object) {
Fragment fragment = (Fragment)object;
if (mCurTransaction == null) {
mCurTransaction = mFragmentManager.beginTransaction();
}
if (DEBUG) Log.v(TAG, "Removing item #" + position + ": f=" + object
+ " v=" + ((Fragment)object).getView());
while (mSavedState.size() <= position) {
mSavedState.add(null);
}
mSavedState.set(position, mFragmentManager.saveFragmentInstanceState(fragment));
mFragments.set(position, null);
mCurTransaction.remove(fragment);
}
@Override
public void setPrimaryItem(ViewGroup container, int position, Object object) {
Fragment fragment = (Fragment)object;
if (fragment != mCurrentPrimaryItem) {
if (mCurrentPrimaryItem != null) {
mCurrentPrimaryItem.setMenuVisibility(false);
mCurrentPrimaryItem.setUserVisibleHint(false);
}
if (fragment != null) {
fragment.setMenuVisibility(true);
fragment.setUserVisibleHint(true);
}
mCurrentPrimaryItem = fragment;
}
}
@Override
public void finishUpdate(ViewGroup container) {
if (mCurTransaction != null) {
mCurTransaction.commitAllowingStateLoss();
mCurTransaction = null;
mFragmentManager.executePendingTransactions();
}
}
@Override
public boolean isViewFromObject(View view, Object object) {
return ((Fragment)object).getView() == view;
}
@Override
public Parcelable saveState() {
Bundle state = null;
// FIXME don't save internal fragment state for now, until we figure out classloader issue
// if (mSavedState.size() > 0) {
// state = new Bundle();
// Fragment.SavedState[] fss = new Fragment.SavedState[mSavedState.size()];
// mSavedState.toArray(fss);
// state.putParcelableArray("states", fss);
// }
for (int i=0; i<mFragments.size(); i++) {
Fragment f = mFragments.get(i);
if (f != null) {
if (state == null) {
state = new Bundle();
}
String key = "f" + i;
mFragmentManager.putFragment(state, key, f);
}
}
return state;
}
@Override
public void restoreState(Parcelable state, ClassLoader loader) {
if (state != null) {
Bundle bundle = (Bundle)state;
bundle.setClassLoader(loader);
Parcelable[] fss = bundle.getParcelableArray("states");
mSavedState.clear();
mFragments.clear();
if (fss != null) {
for (int i=0; i<fss.length; i++) {
mSavedState.add((Fragment.SavedState)fss[i]);
}
}
Iterable<String> keys = bundle.keySet();
for (String key: keys) {
if (key.startsWith("f")) {
int index = Integer.parseInt(key.substring(1));
Fragment f = mFragmentManager.getFragment(bundle, key);
if (f != null) {
while (mFragments.size() <= index) {
mFragments.add(null);
}
f.setMenuVisibility(false);
mFragments.set(index, f);
} else {
Log.w(TAG, "Bad fragment at key " + key);
}
}
}
}
}
}
| More DynamicPagerAdapter fixes
fixes #2223
| src/info/guardianproject/otr/app/im/app/DynamicPagerAdapter.java | More DynamicPagerAdapter fixes |
|
Java | apache-2.0 | b120349ae4720369c53a647d9eb3fdcf085d4bc6 | 0 | scify/Memor-i,scify/Memor-i | package org.scify.memori.helper;
import com.sun.speech.freetts.Voice;
import com.sun.speech.freetts.VoiceManager;
public class Text2Speech {

    /** Name of the FreeTTS voice used for all speech output. */
    private static final String VOICENAME_kevin = "kevin16";

    VoiceManager voiceManager;
    Voice voice;

    /**
     * Creates the speech engine and looks up the "kevin16" FreeTTS voice.
     *
     * @throws IllegalStateException if the voice is not available in this
     *         FreeTTS installation. Previously a missing voice was stored as
     *         null and only surfaced as a NullPointerException on the first
     *         call to {@link #speak(String)}.
     */
    public Text2Speech() {
        voiceManager = VoiceManager.getInstance();
        voice = voiceManager.getVoice(VOICENAME_kevin);
        if (voice == null) {
            throw new IllegalStateException(
                    "FreeTTS voice not found: " + VOICENAME_kevin);
        }
    }

    /**
     * Speaks the given text, cancelling any utterance that is still playing.
     *
     * @param text the text to synthesize; spoken synchronously
     */
    public void speak(String text) {
        voice.getAudioPlayer().cancel();
        voice.allocate();
        voice.speak(text);
    }
}
| src/main/java/org/scify/memori/helper/Text2Speech.java | package org.scify.memori.helper;
import com.sun.speech.freetts.Voice;
import com.sun.speech.freetts.VoiceManager;
public class Text2Speech {

    /** FreeTTS voice identifier used for synthesis. */
    private static final String VOICENAME_kevin = "kevin16";

    /** Phrase supplied at construction time, spoken by {@link #speak()}. */
    private String message;

    /**
     * Stores the phrase to be spoken later.
     *
     * @param text the text this instance will synthesize
     */
    public Text2Speech(String text) {
        this.message = text;
    }

    /**
     * Looks up the "kevin16" voice, allocates it, and speaks the stored phrase.
     */
    public void speak() {
        VoiceManager manager = VoiceManager.getInstance();
        Voice kevin = manager.getVoice(VOICENAME_kevin);
        kevin.allocate();
        kevin.speak(message);
    }
}
| Improved functionality of Text to speech class
| src/main/java/org/scify/memori/helper/Text2Speech.java | Improved functionality of Text to speech class |
|
Java | apache-2.0 | 8d09d1ddb7a89cdf00f4db8957be5112fc396e1c | 0 | MichaelHussey/SolaceKafkaSourceConnector | /**
*
*/
package com.solace.kafka.connect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.source.SourceRecord;
import org.junit.Before;
import org.junit.Test;
/**
* Test the Solace side of the Connector (doesn't send any data to Kafka).
* @author michussey
*
*/
public class SolaceSourceTaskTest implements Runnable {

    /** Connector/connection settings loaded from unit_test.properties. */
    Properties config;
    /** Task under test when running via {@link #main(String[])}. */
    private SolaceSourceTask task;

    /**
     * Loads the test configuration.
     *
     * It's a good idea to use a Solace Cloud service for the messaging server during the unit tests.
     * Register on http://cloud.solace.com/, or download a VMR from http://dev.solace.com/downloads/
     */
    @Before
    public void setup() {
        Properties prop = new Properties();
        InputStream inputStream = getClass().getClassLoader().getResourceAsStream("unit_test.properties");
        try {
            prop.load(inputStream);
        } catch (IOException e) {
            e.printStackTrace();
        }
        config = prop;
    }

    private static final String SCHEMAS_ENABLE_CONFIG = "schemas.enable";
    private static final String SCHEMAS_CACHE_SIZE_CONFIG = "schemas.cache.size";

    /**
     * End-to-end check: publishes one message to Solace over REST, polls it back
     * through the task, validates the Connect record, and round-trips it through
     * the out-of-the-box JSON converter.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void test() {
        SolaceSourceTask task = new SolaceSourceTask();
        List<SourceRecord> records = new ArrayList<SourceRecord>();
        task.start((Map)config);
        try {
            RESTHelper.doSolaceREST(config, "test/foo", false);
            records = task.poll();
        } catch (Exception e) {
            e.printStackTrace();
        }
        assertEquals(records.size(), 1);
        // The Kafka data structure has a handy method to check that all mandatory fields are filled
        ((Struct)records.get(0).value()).validate();
        // Make sure that at least the OOTB JSON converter can handle the record we create
        JsonConverter converter = new JsonConverter();
        Map<String, String> cc = new HashMap<String, String>();
        cc.put(SCHEMAS_ENABLE_CONFIG, "true");
        cc.put(SCHEMAS_CACHE_SIZE_CONFIG,"10");
        converter.configure(cc, false);
        byte[] jsonData = converter.fromConnectData((String)config.get(SolaceConnectorConstants.KAFKA_TOPIC), records.get(0).valueSchema(), records.get(0).value());
        assertTrue(jsonData.length > 0);
        System.err.println("JSON data: "+new String(jsonData));
        // Release the Solace session so later tests start from a clean state.
        task.stop();
    }

    /**
     * Check that the optional properties defaults are being correctly picked up even though they aren't
     * set in the properties file
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testDefaultProperties() {
        SolaceSourceTask task = new SolaceSourceTask();
        task.setParameters((Map)config);
        assertEquals(task.reconnectRetries, SolaceConnectorConstants.DEFAULT_SOLACE_RECONNECT_RETRIES);
        assertEquals(task.reconnectRetryWaitInMillis, SolaceConnectorConstants.DEFAULT_SOLACE_RECONNECT_RETRY_WAIT);
        task.stop();
    }

    /**
     * To make debugging easier: starts the task and polls it on a background thread.
     * @param args unused
     */
    @SuppressWarnings("unchecked")
    public static void main(String[] args)
    {
        SolaceSourceTask task = new SolaceSourceTask();
        SolaceSourceTaskTest tester = new SolaceSourceTaskTest();
        tester.setup();
        tester.task = task;
        task.start((Map)tester.config);
        (new Thread(tester)).start();
    }

    /**
     * Polls the task until this thread is interrupted. Previously an
     * InterruptedException was swallowed and the loop ran forever; now the
     * interrupt status is restored and the loop exits.
     */
    @Override
    public void run() {
        while (!Thread.currentThread().isInterrupted())
        {
            try {
                this.task.poll();
            } catch (InterruptedException e) {
                // Re-assert the interrupt so callers can observe it, then stop polling.
                Thread.currentThread().interrupt();
            }
        }
    }
}
| src/test/java/com/solace/kafka/connect/SolaceSourceTaskTest.java | /**
*
*/
package com.solace.kafka.connect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.json.JsonConverter;
import org.apache.kafka.connect.source.SourceRecord;
import org.junit.Before;
import org.junit.Test;
/**
* Test the Solace side of the Connector (doesn't send any data to Kafka).
* @author michussey
*
*/
public class SolaceSourceTaskTest implements Runnable {

    /** Connector/connection settings loaded from unit_test.properties. */
    Properties config;
    /** Task under test when running via {@link #main(String[])}. */
    private SolaceSourceTask task;

    /**
     * Loads the test configuration.
     *
     * It's a good idea to use a Solace Cloud service for the messaging server during the unit tests.
     * Register on http://cloud.solace.com/, or download a VMR from http://dev.solace.com/downloads/
     */
    @Before
    public void setup() {
        Properties prop = new Properties();
        InputStream inputStream = getClass().getClassLoader().getResourceAsStream("unit_test.properties");
        try {
            prop.load(inputStream);
        } catch (IOException e) {
            e.printStackTrace();
        }
        config = prop;
    }

    private static final String SCHEMAS_ENABLE_CONFIG = "schemas.enable";
    private static final String SCHEMAS_CACHE_SIZE_CONFIG = "schemas.cache.size";

    /**
     * End-to-end check: publishes one message to Solace over REST, polls it back
     * through the task, validates the Connect record, and round-trips it through
     * the out-of-the-box JSON converter.
     */
    @SuppressWarnings("unchecked")
    @Test
    public void test() {
        SolaceSourceTask task = new SolaceSourceTask();
        List<SourceRecord> records = new ArrayList<SourceRecord>();
        task.start((Map)config);
        try {
            RESTHelper.doSolaceREST(config, "test/foo", false);
            records = task.poll();
        } catch (Exception e) {
            e.printStackTrace();
        }
        assertEquals(records.size(), 1);
        // The Kafka data structure has a handy method to check that all mandatory fields are filled
        ((Struct)records.get(0).value()).validate();
        // Make sure that at least the OOTB JSON converter can handle the record we create
        JsonConverter converter = new JsonConverter();
        Map<String, String> cc = new HashMap<String, String>();
        cc.put(SCHEMAS_ENABLE_CONFIG, "true");
        cc.put(SCHEMAS_CACHE_SIZE_CONFIG,"10");
        converter.configure(cc, false);
        byte[] jsonData = converter.fromConnectData((String)config.get(SolaceConnectorConstants.KAFKA_TOPIC), records.get(0).valueSchema(), records.get(0).value());
        assertTrue(jsonData.length > 0);
        System.err.println("JSON data: "+new String(jsonData));
        // Release the Solace session so later tests start from a clean state
        // (previously the task was never stopped, leaking the connection).
        task.stop();
    }

    /**
     * Check that the optional properties defaults are being correctly picked up even though they aren't
     * set in the properties file
     */
    @SuppressWarnings("unchecked")
    @Test
    public void testDefaultProperties() {
        SolaceSourceTask task = new SolaceSourceTask();
        task.setParameters((Map)config);
        assertEquals(task.reconnectRetries, SolaceConnectorConstants.DEFAULT_SOLACE_RECONNECT_RETRIES);
        assertEquals(task.reconnectRetryWaitInMillis, SolaceConnectorConstants.DEFAULT_SOLACE_RECONNECT_RETRY_WAIT);
        task.stop();
    }

    /**
     * To make debugging easier: starts the task and polls it on a background thread.
     * @param args unused
     */
    @SuppressWarnings("unchecked")
    public static void main(String[] args)
    {
        SolaceSourceTask task = new SolaceSourceTask();
        SolaceSourceTaskTest tester = new SolaceSourceTaskTest();
        tester.setup();
        tester.task = task;
        task.start((Map)tester.config);
        (new Thread(tester)).start();
    }

    /**
     * Polls the task until this thread is interrupted. Previously an
     * InterruptedException was swallowed and the loop ran forever; now the
     * interrupt status is restored and the loop exits.
     */
    @Override
    public void run() {
        while (!Thread.currentThread().isInterrupted())
        {
            try {
                this.task.poll();
            } catch (InterruptedException e) {
                // Re-assert the interrupt so callers can observe it, then stop polling.
                Thread.currentThread().interrupt();
            }
        }
    }
}
| Call stop() on SolaceSourceTask after each task to clean up Solace resources
| src/test/java/com/solace/kafka/connect/SolaceSourceTaskTest.java | Call stop() on SolaceSourceTask after each task to clean up Solace resources |
|
Java | bsd-2-clause | 905b13d1687913a1413c24ca29ab227c961a7663 | 0 | TehSAUCE/imagej,biovoxxel/imagej,TehSAUCE/imagej,biovoxxel/imagej,biovoxxel/imagej,TehSAUCE/imagej | //
// QuitProgram.java
//
/*
ImageJ software for multidimensional image processing and analysis.
Copyright (c) 2010, ImageJDev.org.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the names of the ImageJDev.org developers nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package imagej.core.plugins.app;
import imagej.ImageJ;
import imagej.ext.plugin.ImageJPlugin;
import imagej.ext.plugin.Menu;
import imagej.ext.plugin.Plugin;
import imagej.ui.DialogPrompt;
import imagej.ui.IUserInterface;
import imagej.ui.UIService;
/**
* Quits ImageJ.
*
* @author Grant Harris
* @author Barry DeZonia
* @author Curtis Rueden
*/
@Plugin(iconPath = "/icons/plugins/door_in.png", menu = {
@Menu(label = "File"),
@Menu(label = "Quit", weight = Double.MAX_VALUE, mnemonic = 'q',
accelerator = "control Q") })
public class QuitProgram implements ImageJPlugin {

	/** Confirmation text shown before shutting down. */
	public static final String MESSAGE = "Quit ImageJ?";

	/** Asks the user for confirmation and, if granted, terminates the JVM. */
	@Override
	public void run() {
		if (!promptForQuit()) return;
		// TODO - save existing data
		// TODO - close windows
		// TODO - call ImageJ.getContext().shutdown() or some such, rather than
		// using System.exit(0), which kills the entire JVM.
		System.exit(0);
	}

	/**
	 * Shows a yes/no dialog.
	 *
	 * @return true when the user confirmed quitting
	 */
	private boolean promptForQuit() {
		final IUserInterface userInterface = ImageJ.get(UIService.class).getUI();
		final DialogPrompt prompt =
			userInterface.dialogPrompt(MESSAGE, "Quit",
				DialogPrompt.MessageType.QUESTION_MESSAGE,
				DialogPrompt.OptionType.YES_NO_OPTION);
		return prompt.prompt() == DialogPrompt.Result.YES_OPTION;
	}
}
| core/plugins/app/src/main/java/imagej/core/plugins/app/QuitProgram.java | //
// QuitProgram.java
//
/*
ImageJ software for multidimensional image processing and analysis.
Copyright (c) 2010, ImageJDev.org.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the names of the ImageJDev.org developers nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package imagej.core.plugins.app;
import imagej.ImageJ;
import imagej.ext.plugin.ImageJPlugin;
import imagej.ext.plugin.Menu;
import imagej.ext.plugin.Plugin;
import imagej.ui.DialogPrompt;
import imagej.ui.IUserInterface;
import imagej.ui.UIService;
/**
* Quits ImageJ.
*
* @author Grant Harris
* @author Barry DeZonia
* @author Curtis Rueden
*/
@Plugin(iconPath = "/icons/plugins/door_in.png", menu = {
@Menu(label = "File"),
@Menu(label = "Quit", weight = Double.MAX_VALUE, mnemonic = 'q',
accelerator = "control Q") })
public class QuitProgram implements ImageJPlugin {

	/** Confirmation text shown before shutting down. */
	public static final String MESSAGE = "Really quit ImageJ?";

	/** Asks the user for confirmation and, if granted, terminates the JVM. */
	@Override
	public void run() {
		if (!promptForQuit()) return;
		// TODO - save existing data
		// TODO - close windows
		// TODO - call ImageJ.getContext().shutdown() or some such, rather than
		// using System.exit(0), which kills the entire JVM.
		System.exit(0);
	}

	/**
	 * Shows a yes/no dialog.
	 *
	 * @return true when the user confirmed quitting
	 */
	private boolean promptForQuit() {
		final IUserInterface userInterface = ImageJ.get(UIService.class).getUI();
		final DialogPrompt prompt =
			userInterface.dialogPrompt(MESSAGE, "Quit",
				DialogPrompt.MessageType.QUESTION_MESSAGE,
				DialogPrompt.OptionType.YES_NO_OPTION);
		return prompt.prompt() == DialogPrompt.Result.YES_OPTION;
	}
}
| Get rid of really really really annoying really.
This used to be revision r4219.
| core/plugins/app/src/main/java/imagej/core/plugins/app/QuitProgram.java | Get rid of really really really annoying really. |
|
Java | bsd-3-clause | 076c7149bf3c22400b8db19f9d9469ad74834974 | 0 | WFRobotics/Taurus | // Declare our package (for organizational purposes)
package edu.wpi.first.wpilibj.templates;
// Import the necessary classes
import com.taurus.CompressorPins;
import com.taurus.ControlMapping;
import com.taurus.Joysticks;
import com.taurus.Logger;
import com.taurus.SensorPins;
import com.taurus.ServoPins;
import com.taurus.SolenoidPins;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Victor;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.Solenoid;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.AnalogChannel;
import edu.wpi.first.wpilibj.DriverStationLCD;
/**
* This is a cleaner robot class. It's probably not the best idea to put our
* robot into a wpilibj package. I like abstraction, organization, and comments.
* Use what you want, I only rewrote this because I wanted to program some java.
* Not expecting this to be extra useful.
*
* @author Gordan Freeman is watcthing you < arkaniad AT gmail DOT com>
*/
public class RobotTemplate extends IterativeRobot {
//---------
//Variables
//---------
// Motor Objects
private RobotDrive chassis;
private Victor grabberMotor;
private final double sensitivity = 0.5;
private boolean motorInverted = true;
// Solenoid Objects
private Solenoid tFiringArmOut; // These four are firing mechanisms
private Solenoid tFiringArmIn;
private Solenoid tLoadingPinIn;
private Solenoid tLoadingPinOut;
private Solenoid tGrabberArmOut; // These two are for the grabber
private Solenoid tGrabberArmIn;
// Joysticks
// TODO Split into an enumeration
private Joystick leftStick;
private Joystick rightStick;
// State machine states
private final int stShooterStart = 0,
stShooterRetractFiringPin = 1,
stShooterRetractFiringPinWait = 2,
stShooterSetFiringArm = 3,
stShooterSetFiringArmWait = 4,
stShooterSetFiringPin = 5,
stShooterSetFiringPinWait = 6,
stShooterRetractFiringMech = 7,
stShooterRetractFiringMechWait = 8,
stShooterSafety = 9,
stShooterSafetyLatch = 10,
stShooterSafetyRetract = 11,
stShooterFireReady = 12,
stShooterFireWait = 13;
private final int stAutoStart = 0,
stAutoArmRetracting = 1,
stAutoMoveToPosition = 2,
stAutoMoveToPositionWait = 3,
stAutoFire = 4,
stAutoFireWait = 5,
stAutoMove = 6,
stAutoMoveWait = 7,
stAutoDone = 8;
// Shooter
private DigitalInput sArmL; // Sensors for the shooter state machine
private DigitalInput sArmR;
private DigitalInput sPistonL;
private DigitalInput sPistonR;
private DigitalInput sLatch;
private Compressor compressor;
private int currentShooterState = 0;
private int newShooterState = 0;
private double shooterTime = 0;
private double safetyTime = 0;
// Autonomous
private int currentAutoState = 0;
private int newAutoState = 0;
private double autoTime = 0;
// Camera
private AxisCamera camera;
private final String cameraIP = "10.48.18.11";
private DriverStation driverStation;
private Servo servoCamera;
private double servoVertical = .5;
// Ultrasonic
private AnalogChannel sSonic;
private double sonicSignal;
// Delay Constants
private final double shooterWaitPin = 2.0,
shooterWaitFire = 2.0,
autoWaitPosition = 2.0,
autoWaitFire = 2.0,
autoWaitMove = 2.0;
// Speed Constants
private final double speedStop = 0.0;
private final double speedGrabberOn = 1.0;
private final double speedMotorOn = 1.0;
// Logger
private Logger log;
private static DriverStationLCD DSOutput;
//-----------------
// Public Functions
//-----------------
/**
* This method is the first to run once the code starts up.
*/
public void robotInit() {
log = new Logger("[Core]", System.out);
log.info("Initializing main systems...");
initMotors();
initSensors();
initPneumatics();
initDrive();
initUltrasonic();
initCamera();
log.info("Initialization complete.");
}
/**
* this method starts when operator mode is enabled.
*/
public void teleopInit() {
log.info("Entering teleoperated mode. Activating controls.");
chassis.setSafetyEnabled(true);
chassis.tankDrive(leftStick, rightStick);
}
/**
* This function is ran in a loop during operator control.
*/
public void teleopPeriodic() {
chassis.tankDrive(leftStick, rightStick);
compressorTick();
shooterStateTick(false);
grabberStateTick(false);
driveControlTick();
servoTick();
ultrasoundTick();
}
/**
* This function is called periodically during autonomous mode.
*/
public void autonomousPeriodic() {
autonomousTick();
}
/**
* This function is called periodically during test mode.
*/
public void testPeriodic() {
}
//------------------
// Private Functions
//------------------
/**
* This function manages the state machine for the shooter arm.
*/
private void shooterStateTick(boolean autonomous) {
switch (currentShooterState) {
case stShooterStart: {
if (autonomous) {
log.info("Shooter in autonomous starting state.");
if (sLatch.get()) {
if (sArmL.get() || sArmR.get()) {
newShooterState = stShooterSetFiringPin;
} else {
newShooterState = stShooterSetFiringArm;
}
} else {
if (sArmL.get() || sArmR.get()) {
newShooterState = stShooterRetractFiringMech;
} else {
newShooterState = stShooterRetractFiringPin;
}
}
} else {
log.info("Shooter in starting state.");
newShooterState = stShooterRetractFiringPin;
}
break;
}
case stShooterRetractFiringPin: {
log.info("Retracting firing pin...");
tLoadingPinIn.set(false);
tLoadingPinOut.set(true);
shooterTime = Timer.getFPGATimestamp();
newShooterState = stShooterRetractFiringPinWait;
break;
}
case stShooterRetractFiringPinWait: {
if (Timer.getFPGATimestamp() - shooterTime >= shooterWaitPin) {
log.info("Firing pin retracted.");
newShooterState = stShooterSetFiringArm;
}
break;
}
case stShooterSetFiringArm: {
log.info("Setting firing arm...");
tFiringArmIn.set(false);
tFiringArmOut.set(true);
shooterTime = Timer.getFPGATimestamp();
newShooterState = stShooterSetFiringArmWait;
break;
}
case stShooterSetFiringArmWait: {
if (sArmR.get() || sArmL.get()) {
newShooterState = stShooterSetFiringPin;
}
if (leftStick.getRawButton(ControlMapping.latchLeft) && rightStick.getRawButton(ControlMapping.latchRight)) {
newShooterState = stShooterSetFiringPin;
}
break;
}
case stShooterSetFiringPin: {
log.info("Setting firing pin...");
tLoadingPinOut.set(false);
tLoadingPinIn.set(true);
shooterTime = Timer.getFPGATimestamp();
newShooterState = stShooterSetFiringPinWait;
break;
}
case stShooterSetFiringPinWait: {
if (Timer.getFPGATimestamp() - shooterTime >= shooterWaitPin) {
log.info("Firing pin set.");
newShooterState = stShooterRetractFiringMech;
}
break;
}
case stShooterRetractFiringMech: {
log.info("Retracting firing mechanism...");
tFiringArmOut.set(false);
tFiringArmIn.set(true);
shooterTime = Timer.getFPGATimestamp();
newShooterState = stShooterRetractFiringMechWait;
break;
}
case stShooterRetractFiringMechWait: {
if (!sPistonL.get() && !sPistonR.get()) {
log.info("Firing mechanism set.");
newShooterState = stShooterFireReady;
}
break;
}
case stShooterSafety: {
tFiringArmIn.set(false);
tFiringArmOut.set(true);
safetyTime = Timer.getFPGATimestamp();
newShooterState = stShooterSafetyLatch;
break;
}
case stShooterSafetyLatch: {
if (Timer.getFPGATimestamp() - safetyTime >= shooterWaitPin) {
tLoadingPinIn.set(false);
tLoadingPinOut.set(true);
newShooterState = stShooterSafetyRetract;
}
break;
}
case stShooterSafetyRetract: {
tFiringArmOut.set(false);
tFiringArmIn.set(true);
newShooterState = stShooterStart;
break;
}
case stShooterFireReady: {
if (leftStick.getRawButton(ControlMapping.fireLeft)
&& rightStick.getRawButton(ControlMapping.fireRight)) {
log.info("Firing shooter!");
tLoadingPinIn.set(false);
tLoadingPinOut.set(true);
shooterTime = Timer.getFPGATimestamp();
newShooterState = stShooterFireWait;
} else if (autonomous && currentAutoState == stAutoFire) {
tLoadingPinIn.set(false);
tLoadingPinOut.set(true);
shooterTime = Timer.getFPGATimestamp();
newShooterState = stShooterFireWait;
}
if (leftStick.getRawButton(ControlMapping.releaseLeft) && rightStick.getRawButton(ControlMapping.releaseRight)) {
newShooterState = stShooterSafety;
}
break;
}
case stShooterFireWait: {
if (Timer.getFPGATimestamp() - shooterTime >= shooterWaitFire) {
log.info("Reloading shooter.");
newShooterState = stShooterStart;
}
break;
}
default: {
log.error("Shooter should never be in this state. SOS.");
break;
}
}
currentShooterState = newShooterState;
}
/**
* This function manages the state machine for the grabber arm
*/
private void grabberStateTick(boolean autonomous) {
if (rightStick.getRawButton(ControlMapping.grabberArmDown) && rightStick.getRawButton(ControlMapping.grabberArmUp)) {
// If both buttons are pressed, report an error.
log.error("Too many buttons pressed, grabber arm cannot exist in two positions simultaneously!");
tGrabberArmOut.set(false);
tGrabberArmIn.set(true);
} else if (rightStick.getRawButton(ControlMapping.grabberArmDown) || autonomous) {
log.info("Arm extended.");
tGrabberArmOut.set(true);
tGrabberArmIn.set(false);
} else if (rightStick.getRawButton(ControlMapping.grabberArmUp)) {
log.info("Arm retracted.");
tGrabberArmOut.set(false);
tGrabberArmIn.set(true);
}
if (leftStick.getRawButton(ControlMapping.grabberMotorForward) && leftStick.getRawButton(ControlMapping.grabberMotorReverse)) {
log.error("Too many buttons pressed, grabber motor cannot exist in two states!");
grabberMotor.set(0.0);
} else if (leftStick.getRawButton(ControlMapping.grabberMotorForward)) {
log.info("Grabber motor forward");
grabberMotor.set(speedGrabberOn);
} else if (leftStick.getRawButton(ControlMapping.grabberMotorReverse)) {
log.info("Grabber motor reverse");
grabberMotor.set(-speedGrabberOn);
} else {
grabberMotor.set(speedStop);
}
}
/**
* This function manages the compressor.
*/
private void compressorTick() {
// If the tank is low on pressure, stop it. Otherwise make sure its on.
if (!compressor.getPressureSwitchValue()) {
compressor.start();
} else {
compressor.stop();
}
}
/**
* This function manages the control facing switch
*/
private void driveControlTick() {
if(rightStick.getRawButton(ControlMapping.driveFacing)) {
motorInverted = !motorInverted;
setInvertedMotors(motorInverted);
}
}
/**
 * Runs one tick of the autonomous state machine: retract the grabber arm,
 * drive into firing position while arming the shooter, fire, then drive
 * clear and stop. Timed waits compare against the FPGA timestamp captured
 * when the corresponding state was entered.
 */
private void autonomousTick() {
    chassis.setSafetyEnabled(false);
    compressorTick();
    switch (currentAutoState) {
        case stAutoStart: {
            newAutoState = stAutoArmRetracting;
            break;
        }
        case stAutoArmRetracting: {
            log.info("Retracting arm");
            grabberStateTick(true);
            newAutoState = stAutoMoveToPosition;
            break;
        }
        case stAutoMoveToPosition: {
            log.info("Moving into firing position.");
            chassis.drive(speedMotorOn, 0);
            autoTime = Timer.getFPGATimestamp();
            newAutoState = stAutoMoveToPositionWait;
            shooterStateTick(true);
            break;
        }
        case stAutoMoveToPositionWait: {
            // Stop driving once the position delay elapses, but keep ticking
            // the shooter state machine until it reports ready to fire.
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitPosition) {
                chassis.drive(speedStop, 0);
            }
            shooterStateTick(true);
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitPosition
                    && currentShooterState == stShooterFireReady) {
                newAutoState = stAutoFire;
            }
            break;
        }
        case stAutoFire: {
            log.info("Firing!");
            shooterStateTick(true);
            newAutoState = stAutoFireWait;
            autoTime = Timer.getFPGATimestamp();
            break;
        }
        case stAutoFireWait: {
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitFire) {
                newAutoState = stAutoMove;
            }
            break;
        }
        case stAutoMove: {
            log.info("Moving after firing..");
            chassis.drive(speedMotorOn, 0);
            autoTime = Timer.getFPGATimestamp();
            newAutoState = stAutoMoveWait;
            break;
        }
        case stAutoMoveWait: {
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitMove) {
                chassis.drive(speedStop, 0);
                newAutoState = stAutoDone;
            }
            break;
        }
        case stAutoDone: {
            // Autonomous finished; hold position until teleop starts.
            break;
        }
        default: {
            // Mirror shooterStateTick: an unknown state is a programming error.
            log.error("Autonomous should never be in this state. SOS.");
            break;
        }
    }
    currentAutoState = newAutoState;
}
/**
 * Sets up the drive train and the grabber motor controller.
 */
private void initMotors() {
    log.info("Initializing motors...");
    grabberMotor = new Victor(5);       // grabber roller on PWM 5
    chassis = new RobotDrive(1,2,3,4);  // four-motor drive base on PWM 1-4
    setInvertedMotors(motorInverted);   // apply the default facing
    chassis.setMaxOutput(sensitivity);  // scale all drive output
}
/**
 * Creates the digital inputs read by the shooter state machine: the latch
 * switch plus the arm and piston limit switches on each side.
 */
private void initSensors() {
    log.info("Initializing sensors...");
    sLatch = new DigitalInput(SensorPins.latch);            // shooter latch switch
    sArmL = new DigitalInput(SensorPins.armSensorLeft);     // firing-arm limit switches
    sArmR = new DigitalInput(SensorPins.armSensorRight);
    sPistonL = new DigitalInput(SensorPins.armPistonLeft);  // firing-piston position switches
    sPistonR = new DigitalInput(SensorPins.armPistonRight);
}
/**
 * Creates the solenoid pairs for the firing and grabber mechanisms and the
 * compressor that feeds them.
 */
private void initPneumatics() {
    log.info("Initializing solenoids...");
    tGrabberArmIn = new Solenoid(SolenoidPins.grabberArmIn);    // grabber arm pair
    tGrabberArmOut = new Solenoid(SolenoidPins.grabberArmOut);
    tFiringArmIn = new Solenoid(SolenoidPins.firingArmIn);      // firing arm pair
    tFiringArmOut = new Solenoid(SolenoidPins.firingArmOut);
    tLoadingPinIn = new Solenoid(SolenoidPins.loadingPinIn);    // loading pin pair
    tLoadingPinOut = new Solenoid(SolenoidPins.loadingPinOut);
    log.info("Initializing compressor...");
    compressor = new Compressor(CompressorPins.relay, CompressorPins.pressure);
}
/**
 * Creates the two tank-drive joysticks and binds them to the chassis.
 * NOTE(review): the rear-motor inversion forced to false here overwrites
 * whatever initMotors()/setInvertedMotors() configured — confirm intended.
 */
private void initDrive() {
    log.info("Initializing drive subsystem...");
    leftStick = new Joystick(Joysticks.left);
    rightStick = new Joystick(Joysticks.right);
    chassis.tankDrive(leftStick, rightStick);
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearLeft, false);
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearRight, false);
}
/**
 * Creates the servo that tilts the camera (driven by servoTick()).
 */
private void initCamera() {
    log.info("Initializing camera servo...");
    servoCamera = new Servo(ServoPins.cameraServo);
}
/**
 * Creates the analog input for the ultrasonic range finder.
 * NOTE(review): channel 1 is hard-coded here instead of coming from a
 * pin-mapping class like the other sensors.
 */
private void initUltrasonic() {
    log.info("Initializing ultrasonic sensor...");
    sSonic = new AnalogChannel(1);
}
/**
 * Drives the camera tilt servo from the left joystick.
 * Holding camUp/camDown nudges the setpoint by 0.005 per tick; the setpoint
 * is now clamped to the servo's valid [0.0, 1.0] range so it can no longer
 * drift out of bounds while a button is held.
 */
private void servoTick() {
    if (leftStick.getRawButton(ControlMapping.camUp)) {
        servoVertical = servoVertical + .005;
    } else if (leftStick.getRawButton(ControlMapping.camDown)) {
        servoVertical = servoVertical - .005;
    }
    // Servo setpoints are only meaningful between 0 and 1.
    servoVertical = Math.min(1.0, Math.max(0.0, servoVertical));
    servoCamera.set(servoVertical);
}
/**
 * Bookkeeping for smoothing the ultrasound measurement (see ultrasoundTick()).
 */
private int counter = 0;          // tick counter for the driver-display update cadence
private double runningAverage[];  // intended sample window for smoothing readings
private double AveragedSignal;    // last smoothed value pushed to the driver station LCD
/**
 * Reads the ultrasonic range finder, converts the voltage to inches
 * (11.47 mV per inch per the existing scale factor), and every full sample
 * window pushes a smoothed distance to the driver station LCD.
 * Implements the running average the previous revision stubbed out: the
 * runningAverage field was never allocated, so the old averaging loop was
 * left commented out and the raw reading was displayed instead.
 */
private void ultrasoundTick() {
    sonicSignal = sSonic.getAverageVoltage();
    sonicSignal = (sonicSignal * 1000) / 11.47; // volts -> inches
    log.dbg("Ultrasonic reading: " + String.valueOf(Math.floor(sonicSignal/12)) + "ft. "
            + String.valueOf(Math.floor(sonicSignal%12)) + "in.");
    if (runningAverage == null) {
        // Lazily allocate the 15-sample window (the field is never
        // initialized elsewhere).
        runningAverage = new double[15];
    }
    runningAverage[counter] = sonicSignal;
    counter = counter + 1;
    if (counter >= runningAverage.length) {
        counter = 0;
        double sum = 0;
        for (int i = 0; i < runningAverage.length; i++) {
            sum = sum + runningAverage[i];
        }
        AveragedSignal = sum / runningAverage.length;
        DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser1, 1,
                "Dist: " + String.valueOf(Math.floor(AveragedSignal/12)) + "ft. "
                + String.valueOf(Math.floor(AveragedSignal%12)) + "in.");
        DriverStationLCD.getInstance().updateLCD();
    }
}
/**
 * This function sets all the motor inversions en masse.
 * Rear motors follow {@code inverted}; front motors are always set to the
 * opposite — presumably due to how they are mounted/geared.
 * NOTE(review): confirm the front/rear asymmetry is intentional.
 */
private void setInvertedMotors(boolean inverted) {
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearLeft, inverted);
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearRight, inverted);
    chassis.setInvertedMotor(RobotDrive.MotorType.kFrontLeft, !inverted);
    chassis.setInvertedMotor(RobotDrive.MotorType.kFrontRight, !inverted);
}
}
| src/edu/wpi/first/wpilibj/templates/RobotTemplate.java | // Declare our package (for organizational purposes)
package edu.wpi.first.wpilibj.templates;
// Import the necessary classes
import com.taurus.CompressorPins;
import com.taurus.ControlMapping;
import com.taurus.Joysticks;
import com.taurus.Logger;
import com.taurus.SensorPins;
import com.taurus.ServoPins;
import com.taurus.SolenoidPins;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.Timer;
import edu.wpi.first.wpilibj.Victor;
import edu.wpi.first.wpilibj.camera.AxisCamera;
import edu.wpi.first.wpilibj.DigitalInput;
import edu.wpi.first.wpilibj.Solenoid;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.Servo;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.AnalogChannel;
/**
* This is a cleaner robot class. It's probably not the best idea to put our
* robot into a wpilibj package. I like abstraction, organization, and comments.
* Use what you want, I only rewrote this because I wanted to program some java.
* Not expecting this to be extra useful.
*
* @author Tanner Danzey < arkaniad AT gmail DOT com>
*/
public class RobotTemplate extends IterativeRobot {
//---------
//Variables
//---------
// Motor Objects
private RobotDrive chassis;              // four-motor drive base
private Victor grabberMotor;             // grabber roller motor
private final double sensitivity = 0.5;  // max drive output scale
private boolean motorInverted = true;    // current "which way is forward" state
// Solenoid Objects
private Solenoid tFiringArmOut; // These four are firing mechanisms
private Solenoid tFiringArmIn;
private Solenoid tLoadingPinIn;
private Solenoid tLoadingPinOut;
private Solenoid tGrabberArmOut; // These two are for the grabber
private Solenoid tGrabberArmIn;
// Joysticks
// TODO Split into an enumeration
private Joystick leftStick;
private Joystick rightStick;
// State machine states for the shooter (see shooterStateTick)
private final int stShooterStart = 0,
        stShooterRetractFiringPin = 1,
        stShooterRetractFiringPinWait = 2,
        stShooterSetFiringArm = 3,
        stShooterSetFiringArmWait = 4,
        stShooterSetFiringPin = 5,
        stShooterSetFiringPinWait = 6,
        stShooterRetractFiringMech = 7,
        stShooterRetractFiringMechWait = 8,
        stShooterSafety = 9,
        stShooterSafetyLatch = 10,
        stShooterSafetyRetract = 11,
        stShooterFireReady = 12,
        stShooterFireWait = 13;
// State machine states for autonomous (see autonomousTick)
private final int stAutoStart = 0,
        stAutoArmRetracting = 1,
        stAutoMoveToPosition = 2,
        stAutoMoveToPositionWait = 3,
        stAutoFire = 4,
        stAutoFireWait = 5,
        stAutoMove = 6,
        stAutoMoveWait = 7,
        stAutoDone = 8;
// Shooter
private DigitalInput sArmL; // Sensors for the shooter state machine
private DigitalInput sArmR;
private DigitalInput sPistonL;
private DigitalInput sPistonR;
private DigitalInput sLatch;
private Compressor compressor;
private int currentShooterState = 0; // state evaluated this tick
private int newShooterState = 0;     // state to enter next tick
private double shooterTime = 0;      // FPGA timestamp when a timed shooter state began
private double safetyTime = 0;       // FPGA timestamp when the safety sequence began
// Autonomous
private int currentAutoState = 0;
private int newAutoState = 0;
private double autoTime = 0;         // FPGA timestamp when a timed auto state began
// Camera
private AxisCamera camera;           // NOTE(review): never assigned in this revision
private final String cameraIP = "10.48.18.11";
private DriverStation driverStation; // NOTE(review): never assigned in this revision
private Servo servoCamera;
private double servoVertical = .5;   // camera tilt setpoint
// Ultrasonic
private AnalogChannel sSonic;
private double sonicSignal;          // latest scaled ultrasonic reading
// Delay Constants
private final double shooterWaitPin = 2.0,
        shooterWaitFire = 2.0,
        autoWaitPosition = 2.0,
        autoWaitFire = 2.0,
        autoWaitMove = 2.0;
// Speed Constants
private final double speedStop = 0.0;
private final double speedGrabberOn = 1.0;
private final double speedMotorOn = 1.0;
// Logger
private Logger log;
//-----------------
// Public Functions
//-----------------
/**
 * This method is the first to run once the code starts up.
 * Brings up logging first so each subsystem init can report progress.
 */
public void robotInit() {
    log = new Logger("[Core]", System.out);
    log.info("Initializing main systems...");
    initMotors();
    initSensors();
    initPneumatics();
    initDrive();
    initUltrasonic();
    initCamera();
    log.info("Initialization complete.");
}
/**
 * This method starts when operator mode is enabled: re-enables motor
 * safety and binds tank drive to the two joysticks.
 */
public void teleopInit() {
    log.info("Entering teleoperated mode. Activating controls.");
    chassis.setSafetyEnabled(true);
    chassis.tankDrive(leftStick, rightStick);
}
/**
 * This function is run in a loop during operator control:
 * drive, then tick each subsystem state machine.
 */
public void teleopPeriodic() {
    chassis.tankDrive(leftStick, rightStick);
    compressorTick();
    shooterStateTick(false);
    grabberStateTick(false);
    driveControlTick();
    servoTick();
    ultrasoundTick();
}
/**
 * This function is called periodically during autonomous mode.
 */
public void autonomousPeriodic() {
    autonomousTick();
}
/**
 * This function is called periodically during test mode. Currently a no-op.
 */
public void testPeriodic() {
}
//------------------
// Private Functions
//------------------
/**
 * This function manages the state machine for the shooter arm.
 * Each tick evaluates currentShooterState, commands the solenoids for that
 * state, and records the state to enter next tick in newShooterState.
 * Timed states compare against the FPGA timestamp captured on entry.
 *
 * @param autonomous true when driven by the autonomous state machine
 */
private void shooterStateTick(boolean autonomous) {
    switch (currentShooterState) {
        case stShooterStart: {
            // Pick a starting point based on where the mechanism already is
            // (latch and arm sensors) when in autonomous; teleop always
            // begins by retracting the firing pin.
            if (autonomous) {
                log.info("Shooter in autonomous starting state.");
                if (sLatch.get()) {
                    if (sArmL.get() || sArmR.get()) {
                        newShooterState = stShooterSetFiringPin;
                    } else {
                        newShooterState = stShooterSetFiringArm;
                    }
                } else {
                    if (sArmL.get() || sArmR.get()) {
                        newShooterState = stShooterRetractFiringMech;
                    } else {
                        newShooterState = stShooterRetractFiringPin;
                    }
                }
            } else {
                log.info("Shooter in starting state.");
                newShooterState = stShooterRetractFiringPin;
            }
            break;
        }
        case stShooterRetractFiringPin: {
            // Pull the loading pin, then wait a fixed delay.
            log.info("Retracting firing pin...");
            tLoadingPinIn.set(false);
            tLoadingPinOut.set(true);
            shooterTime = Timer.getFPGATimestamp();
            newShooterState = stShooterRetractFiringPinWait;
            break;
        }
        case stShooterRetractFiringPinWait: {
            if (Timer.getFPGATimestamp() - shooterTime >= shooterWaitPin) {
                log.info("Firing pin retracted.");
                newShooterState = stShooterSetFiringArm;
            }
            break;
        }
        case stShooterSetFiringArm: {
            // Extend the firing arm, then wait for a sensor or operator latch.
            log.info("Setting firing arm...");
            tFiringArmIn.set(false);
            tFiringArmOut.set(true);
            shooterTime = Timer.getFPGATimestamp();
            newShooterState = stShooterSetFiringArmWait;
            break;
        }
        case stShooterSetFiringArmWait: {
            // Advance when either arm sensor trips, or when the operator
            // presses both latch buttons as a manual override.
            if (sArmR.get() || sArmL.get()) {
                newShooterState = stShooterSetFiringPin;
            }
            if (leftStick.getRawButton(ControlMapping.latchLeft) && rightStick.getRawButton(ControlMapping.latchRight)) {
                newShooterState = stShooterSetFiringPin;
            }
            break;
        }
        case stShooterSetFiringPin: {
            log.info("Setting firing pin...");
            tLoadingPinOut.set(false);
            tLoadingPinIn.set(true);
            shooterTime = Timer.getFPGATimestamp();
            newShooterState = stShooterSetFiringPinWait;
            break;
        }
        case stShooterSetFiringPinWait: {
            if (Timer.getFPGATimestamp() - shooterTime >= shooterWaitPin) {
                log.info("Firing pin set.");
                newShooterState = stShooterRetractFiringMech;
            }
            break;
        }
        case stShooterRetractFiringMech: {
            // Pull the firing arm back in; the pin now holds the mechanism.
            log.info("Retracting firing mechanism...");
            tFiringArmOut.set(false);
            tFiringArmIn.set(true);
            shooterTime = Timer.getFPGATimestamp();
            newShooterState = stShooterRetractFiringMechWait;
            break;
        }
        case stShooterRetractFiringMechWait: {
            // Ready once both piston sensors read clear.
            if (!sPistonL.get() && !sPistonR.get()) {
                log.info("Firing mechanism set.");
                newShooterState = stShooterFireReady;
            }
            break;
        }
        case stShooterSafety: {
            // Safety sequence: re-extend the arm, release the pin under
            // support, then retract — de-energizes without firing.
            tFiringArmIn.set(false);
            tFiringArmOut.set(true);
            safetyTime = Timer.getFPGATimestamp();
            newShooterState = stShooterSafetyLatch;
            break;
        }
        case stShooterSafetyLatch: {
            if (Timer.getFPGATimestamp() - safetyTime >= shooterWaitPin) {
                tLoadingPinIn.set(false);
                tLoadingPinOut.set(true);
                newShooterState = stShooterSafetyRetract;
            }
            break;
        }
        case stShooterSafetyRetract: {
            tFiringArmOut.set(false);
            tFiringArmIn.set(true);
            newShooterState = stShooterStart;
            break;
        }
        case stShooterFireReady: {
            // Fire on both fire buttons (or the autonomous fire state);
            // both release buttons run the safety sequence instead.
            if (leftStick.getRawButton(ControlMapping.fireLeft)
                    && rightStick.getRawButton(ControlMapping.fireRight)) {
                log.info("Firing shooter!");
                tLoadingPinIn.set(false);
                tLoadingPinOut.set(true);
                shooterTime = Timer.getFPGATimestamp();
                newShooterState = stShooterFireWait;
            } else if (autonomous && currentAutoState == stAutoFire) {
                tLoadingPinIn.set(false);
                tLoadingPinOut.set(true);
                shooterTime = Timer.getFPGATimestamp();
                newShooterState = stShooterFireWait;
            }
            if (leftStick.getRawButton(ControlMapping.releaseLeft) && rightStick.getRawButton(ControlMapping.releaseRight)) {
                newShooterState = stShooterSafety;
            }
            break;
        }
        case stShooterFireWait: {
            if (Timer.getFPGATimestamp() - shooterTime >= shooterWaitFire) {
                log.info("Reloading shooter.");
                newShooterState = stShooterStart;
            }
            break;
        }
        default: {
            log.error("Shooter should never be in this state. SOS.");
            break;
        }
    }
    currentShooterState = newShooterState;
}
/**
 * This function manages the state machine for the grabber arm.
 * Arm: conflicting up+down input retracts as a safe default; autonomous
 * always extends; no input leaves the solenoids in their last state.
 * Motor: conflicting input stops it, otherwise follow the pressed button.
 */
private void grabberStateTick(boolean autonomous) {
    if (rightStick.getRawButton(ControlMapping.grabberArmDown) && rightStick.getRawButton(ControlMapping.grabberArmUp)) {
        // If both buttons are pressed, report an error.
        log.error("Too many buttons pressed, grabber arm cannot exist in two positions simultaneously!");
        tGrabberArmOut.set(false);
        tGrabberArmIn.set(true);
    } else if (rightStick.getRawButton(ControlMapping.grabberArmDown) || autonomous) {
        log.info("Arm extended.");
        tGrabberArmOut.set(true);
        tGrabberArmIn.set(false);
    } else if (rightStick.getRawButton(ControlMapping.grabberArmUp)) {
        log.info("Arm retracted.");
        tGrabberArmOut.set(false);
        tGrabberArmIn.set(true);
    }
    if (leftStick.getRawButton(ControlMapping.grabberMotorForward) && leftStick.getRawButton(ControlMapping.grabberMotorReverse)) {
        log.error("Too many buttons pressed, grabber motor cannot exist in two states!");
        grabberMotor.set(0.0); // NOTE(review): magic number; speedStop elsewhere
    } else if (leftStick.getRawButton(ControlMapping.grabberMotorForward)) {
        log.info("Grabber motor forward");
        grabberMotor.set(speedGrabberOn);
    } else if (leftStick.getRawButton(ControlMapping.grabberMotorReverse)) {
        log.info("Grabber motor reverse");
        grabberMotor.set(-speedGrabberOn);
    } else {
        grabberMotor.set(speedStop);
    }
}
/**
 * This function manages the compressor.
 */
private void compressorTick() {
    // If the tank is low on pressure, start it. Otherwise stop it.
    if (!compressor.getPressureSwitchValue()) {
        compressor.start();
    } else {
        compressor.stop();
    }
}
/**
 * This function manages the control facing switch.
 * NOTE(review): reads the raw button level, so holding the button toggles
 * the inversion every loop iteration — edge detection is probably wanted.
 */
private void driveControlTick() {
    if(rightStick.getRawButton(ControlMapping.driveFacing)) {
        motorInverted = !motorInverted;
        setInvertedMotors(motorInverted);
    }
}
/**
 * This function controls the robot in autonomous mode.
 * Sequence: retract arm -> drive to firing position while arming the
 * shooter -> fire -> drive clear -> done.
 */
private void autonomousTick() {
    chassis.setSafetyEnabled(false);
    compressorTick();
    switch (currentAutoState) {
        case stAutoStart: {
            newAutoState = stAutoArmRetracting;
            break;
        }
        case stAutoArmRetracting: {
            log.info("Retracting arm");
            grabberStateTick(true);
            newAutoState = stAutoMoveToPosition;
            break;
        }
        case stAutoMoveToPosition: {
            log.info("Moving into firing position.");
            chassis.drive(speedMotorOn, 0);
            autoTime = Timer.getFPGATimestamp();
            newAutoState = stAutoMoveToPositionWait;
            shooterStateTick(true);
            break;
        }
        case stAutoMoveToPositionWait: {
            // Stop driving once the delay elapses, then wait for the
            // shooter state machine to report ready before firing.
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitPosition) {
                chassis.drive(speedStop, 0);
            }
            shooterStateTick(true);
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitPosition
                    && currentShooterState == stShooterFireReady) {
                //TODO Fix this magic number
                newAutoState = stAutoFire;
            }
            break;
        }
        case stAutoFire: {
            log.info("Firing!");
            shooterStateTick(true);
            newAutoState = stAutoFireWait;
            autoTime = Timer.getFPGATimestamp();
            break;
        }
        case stAutoFireWait: {
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitFire) {
                newAutoState = stAutoMove;
            }
            break;
        }
        case stAutoMove: {
            log.info("Moving after firing..");
            chassis.drive(speedMotorOn, 0);
            autoTime = Timer.getFPGATimestamp();
            newAutoState = stAutoMoveWait;
            break;
        }
        case stAutoMoveWait: {
            if (Timer.getFPGATimestamp() - autoTime >= autoWaitMove) {
                chassis.drive(speedStop, 0);
                newAutoState = stAutoDone;
            }
            break;
        }
        case stAutoDone: {
            // Autonomous finished; hold until teleop.
            break;
        }
    }
    currentAutoState = newAutoState;
}
/**
 * Initialize the motor subsystem.
 */
private void initMotors() {
    log.info("Initializing motors...");
    chassis = new RobotDrive(1,2,3,4); // Initialize all four drive motors
    grabberMotor = new Victor(5); // Initialize the grabber motor
    setInvertedMotors(motorInverted);
    chassis.setMaxOutput(sensitivity); // scale all drive output
}
/**
 * Initialize the sensor subsystem (shooter limit/latch switches).
 */
private void initSensors() {
    log.info("Initializing sensors...");
    sArmL = new DigitalInput(SensorPins.armSensorLeft);
    sArmR = new DigitalInput(SensorPins.armSensorRight);
    sPistonL = new DigitalInput(SensorPins.armPistonLeft);
    sPistonR = new DigitalInput(SensorPins.armPistonRight);
    sLatch = new DigitalInput(SensorPins.latch);
}
/**
 * Initialize the pneumatics subsystem: solenoid pairs plus the compressor.
 */
private void initPneumatics() {
    log.info("Initializing solenoids...");
    tFiringArmIn = new Solenoid(SolenoidPins.firingArmIn);
    tFiringArmOut = new Solenoid(SolenoidPins.firingArmOut);
    tLoadingPinIn = new Solenoid(SolenoidPins.loadingPinIn);
    tLoadingPinOut = new Solenoid(SolenoidPins.loadingPinOut);
    tGrabberArmIn = new Solenoid(SolenoidPins.grabberArmIn);
    tGrabberArmOut = new Solenoid(SolenoidPins.grabberArmOut);
    log.info("Initializing compressor...");
    compressor = new Compressor(CompressorPins.relay,
            CompressorPins.pressure);
}
/**
 * Initialize the drive subsystem.
 * NOTE(review): the rear-motor inversion forced to false here overrides
 * what initMotors()/setInvertedMotors() configured — confirm intended.
 */
private void initDrive() {
    log.info("Initializing drive subsystem...");
    leftStick = new Joystick(Joysticks.left);
    rightStick = new Joystick(Joysticks.right);
    chassis.tankDrive(leftStick, rightStick);
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearLeft, false);
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearRight, false);
}
/**
 * Initialize the camera servos.
 */
private void initCamera() {
    log.info("Initializing camera servo...");
    servoCamera = new Servo(ServoPins.cameraServo);
}
/**
 * Initialize ultrasonic system.
 */
private void initUltrasonic() {
    log.info("Initializing ultrasonic sensor...");
    sSonic = new AnalogChannel(1); // NOTE(review): hard-coded channel number
}
/**
 * This control manages the servos for the camera.
 * NOTE(review): servoVertical is not clamped, so holding a button can push
 * it outside the servo's valid [0, 1] range.
 */
private void servoTick() {
    if (leftStick.getRawButton(ControlMapping.camUp)) {
        servoVertical = servoVertical + .005;
    } else if (leftStick.getRawButton(ControlMapping.camDown)) {
        servoVertical = servoVertical - .005;
    }
    servoCamera.set(servoVertical);
}
/**
 * This control manages the ultrasound measurement.
 * NOTE(review): the conversion (volts * 100 / 11.47) is presumably a
 * voltage-to-distance scaling — factor unverified.
 */
private void ultrasoundTick() {
    sonicSignal = sSonic.getAverageVoltage();
    sonicSignal = (sonicSignal * 100) / 11.47;
    log.dbg("Ultrasonic reading: " + String.valueOf(sonicSignal));
}
/**
 * This function sets all the motor inversions en masse.
 * Rear motors follow the flag; front motors always get the opposite —
 * presumably due to mounting. NOTE(review): confirm the asymmetry is intentional.
 */
private void setInvertedMotors(boolean inverted) {
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearLeft, inverted);
    chassis.setInvertedMotor(RobotDrive.MotorType.kRearRight, inverted);
    chassis.setInvertedMotor(RobotDrive.MotorType.kFrontLeft, !inverted);
    chassis.setInvertedMotor(RobotDrive.MotorType.kFrontRight, !inverted);
}
}
| Added working ultra sonic and driver display. needs averaging still | src/edu/wpi/first/wpilibj/templates/RobotTemplate.java | Added working ultra sonic and driver display. needs averaging still |
|
Java | mit | b97b2a2a9fe64501d5a88e4d7c8126859f082361 | 0 | josuecb/Scientific-Calculator | MSym.java | /**
* Created by Josue on 12/12/2016.
*/
public class MSym {
private String label;
private long value;
public MSym(String label, long value) {
this.label = label;
this.value = value;
}
public boolean equals(MSym symbol) {
return (this.label.equals(symbol.getLabel()));
}
public long getValue() {
return this.value;
}
public String getLabel() {
return this.label;
}
}
| Deprecated
use MSymbol instead | MSym.java | Deprecated |
||
Java | apache-2.0 | 002e3feef76135fd8cb87b6769f997336d65c509 | 0 | realityforge/arez,realityforge/arez,realityforge/arez | integration-tests/src/test/java/arez/integration/DisposingOnDisposeIntegrationTest.java | package arez.integration;
import arez.Arez;
import arez.ArezContext;
import arez.ComputedValue;
import arez.Disposable;
import arez.ObservableValue;
import arez.Observer;
import arez.Priority;
import arez.Procedure;
import arez.SafeFunction;
import arez.SafeProcedure;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import javax.annotation.Nonnull;
import org.realityforge.guiceyloops.shared.ValueUtil;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/**
 * Integration test verifying that disposing a Watcher disposes both its
 * underlying ComputedValue and its Observer, and that a disposed watcher
 * no longer reacts when the observed observable changes.
 */
public class DisposingOnDisposeIntegrationTest
  extends AbstractArezIntegrationTest
{
  /**
   * Observes {@code condition}; when it becomes true, runs {@code effect}
   * once inside an action and then disposes its own observer.
   */
  static final class Watcher
    implements Disposable
  {
    private final ComputedValue<Boolean> _condition;
    private final SafeProcedure _effect;
    private final Observer _observer;

    Watcher( @Nonnull final SafeFunction<Boolean> condition, @Nonnull final SafeProcedure effect )
    {
      _effect = effect;
      final ArezContext context = Arez.context();
      // The computed value disposes this watcher when it is deactivated.
      _condition = context.computed( null, null, condition, null, this::dispose, null, Priority.NORMAL );
      // (Removed an unused local: a Procedure lambda that disposed _condition
      // was declared here but never referenced, left over from a removed
      // OnDispose hook.)
      _observer =
        context.autorun( null, null, true, this::checkCondition, Priority.NORMAL, false, true, true );
      context.triggerScheduler();
    }

    Observer getObserver()
    {
      return _observer;
    }

    private void checkCondition()
    {
      if ( Disposable.isNotDisposed( _condition ) && _condition.get() )
      {
        Arez.context().safeAction( (String) null, true, true, _effect );
        Disposable.dispose( _observer );
      }
    }

    @Override
    public void dispose()
    {
      Disposable.dispose( _observer );
    }

    @Override
    public boolean isDisposed()
    {
      return Disposable.isDisposed( _observer );
    }

    ComputedValue<Boolean> getCondition()
    {
      return _condition;
    }
  }

  @Test
  public void dispose_releasesResources()
    throws Throwable
  {
    final ArezContext context = Arez.context();
    final ObservableValue observable = context.observable();
    final AtomicBoolean result = new AtomicBoolean();
    final AtomicInteger conditionRun = new AtomicInteger();
    final AtomicInteger effectRun = new AtomicInteger();
    final SafeFunction<Boolean> condition = () -> {
      conditionRun.incrementAndGet();
      observable.reportObserved();
      return result.get();
    };
    final SafeProcedure procedure = effectRun::incrementAndGet;
    final Watcher watcher = new Watcher( condition, procedure );
    // Condition evaluated exactly once at construction; effect not yet run.
    assertEquals( conditionRun.get(), 1 );
    assertEquals( effectRun.get(), 0 );
    assertEquals( Disposable.isDisposed( watcher ), false );
    assertEquals( Disposable.isDisposed( watcher.getObserver() ), false );
    assertEquals( Disposable.isDisposed( watcher.getCondition() ), false );
    result.set( true );
    Disposable.dispose( watcher );
    // Disposing the watcher disposes the observer and the computed value.
    assertEquals( Disposable.isDisposed( watcher ), true );
    assertEquals( Disposable.isDisposed( watcher.getObserver() ), true );
    assertEquals( Disposable.isDisposed( watcher.getCondition() ), true );
    // A change after disposal must not re-run the condition or the effect.
    context.action( ValueUtil.randomString(), true, observable::reportChanged );
    assertEquals( conditionRun.get(), 1 );
    assertEquals( effectRun.get(), 0 );
  }
}
| Remove integration test for OnDispose hook in autorun as hook has been removed
| integration-tests/src/test/java/arez/integration/DisposingOnDisposeIntegrationTest.java | Remove integration test for OnDispose hook in autorun as hook has been removed |
||
Java | apache-2.0 | 3c8de927f54e865a2ce9094ffa88998c1195223f | 0 | cirg-up/cilib | src/main/java/net/sourceforge/cilib/util/selection/SamplePredicate.java | /**
* Computational Intelligence Library (CIlib)
* Copyright (C) 2003 - 2010
* Computational Intelligence Research Group (CIRG@UP)
* Department of Computer Science
* University of Pretoria
* South Africa
*
* This library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, see <http://www.gnu.org/licenses/>.
*/
package net.sourceforge.cilib.util.selection;
/**
 * A predicate used to drive a sampling process.
 * NOTE(review): semantics inferred from names — {@code apply} is presumably
 * offered candidate elements until {@code isDone} reports completion;
 * confirm against implementations.
 *
 * @author gpampara
 */
public interface SamplePredicate<T> {
    /**
     * Evaluate the predicate for a candidate element.
     *
     * @param input    the candidate element
     * @param capacity presumably the maximum sample size — see the TODO below
     * @return {@code true} when the element satisfies the predicate
     *
     * @TODO: There must be a way to remove the capacity parameter... ?
     */
    boolean apply(T input, int capacity);

    /** @return {@code true} once sampling is complete and no further elements are needed */
    boolean isDone();
}
| Deleted one more unused file
| src/main/java/net/sourceforge/cilib/util/selection/SamplePredicate.java | Deleted one more unused file |
||
Java | mit | 7846126862d275220e08eb631d593517d18b2c25 | 0 | amiyajima/voogasalad_VOOGirlsGeneration,mzhu22/TurnBasedStrategy | package authoring_environment;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javafx.event.EventHandler;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.MouseEvent;
/**
 * The view of the grid especially for selecting the range.
 * Shows a grid with a reference piece on the centre tile; the user clicks
 * tiles to toggle them in or out of the range, and an initial range can be
 * pre-highlighted. Offsets in {@code myRange} are relative to the centre
 * tile (x grows right, y grows up).
 *
 * @author Mengen Huang
 */
public class RangeGrid extends SuperGrid{

    private static final String DEFAULT_CENTRAL_IMAGE="/resources/images/Patrick.jpeg";

    private List<List<SuperTile>> rangeGrid; // tile matrix from SuperGrid; outer list indexed by column in findTile
    private int myWidth;   // number of columns
    private int myHeight;  // number of rows
    private int centerX;   // column index of the centre tile
    private int centerY;   // row index of the centre tile
    private List<Point2D> myRange; // selected offsets relative to the centre tile

    /**
     * Builds the range-selection grid: places the reference image on the
     * centre tile, pre-highlights the supplied range, and wires the
     * click-to-toggle handlers.
     */
    public RangeGrid(int columns, int rows, int tileSize, String shape, List<Point2D> range) {
        super(columns, rows, tileSize, shape);
        myRange = range;
        myWidth = columns;
        myHeight = rows;
        centerX = myWidth / 2;
        centerY = myHeight / 2;
        initGridTiles(shape);
        rangeGrid = super.myGrid;
        addCenterImage(columns, rows);
        highlightRange(range);
        addSelectAction();
    }

    /** Makes every tile toggle its highlight when clicked. */
    private void addSelectAction() {
        for (List<SuperTile> row : rangeGrid) {
            for (SuperTile tile : row) {
                tile.setOnMouseClicked(new EventHandler<MouseEvent>() {
                    @Override
                    public void handle(MouseEvent event) {
                        switchHighlight(tile);
                    }
                });
            }
        }
    }

    /**
     * Toggles a tile's selected/highlighted state.
     * (Renamed from the misspelled private helper {@code swtichHighlight}.)
     */
    private void switchHighlight(SuperTile tile) {
        if (tile.ifSelected()) {
            tile.deselectTile();
        } else {
            tile.selectTile();
        }
    }

    /**
     * Highlights every in-bounds tile of the given range (offsets relative
     * to the centre tile). The bounds checks now use strict {@code <}: the
     * previous {@code <=} allowed an offset landing exactly on the far edge
     * to map to index myWidth/myHeight and throw IndexOutOfBoundsException.
     */
    private void highlightRange(List<Point2D> range) {
        if ((range != null) && (range.size() > 0)) {
            for (Point2D loc : range) {
                int col = (int) (loc.getX() + centerX);
                int row = (int) (centerY - loc.getY());
                if ((col >= 0) && (col < myWidth) && (row >= 0) && (row < myHeight)) {
                    SuperTile tile = findTile(row, col);
                    tile.selectTile();
                }
            }
        }
    }

    /** Looks up a tile; note the outer list is indexed by the column argument. */
    private SuperTile findTile(int row, int col) {
        SuperTile tile = rangeGrid.get(col).get(row);
        return tile;
    }

    /**
     * Places the default reference image on the centre tile.
     * NOTE(review): both parameters are unused (the centre comes from the
     * centerX/centerY fields) and the call site passes them swapped
     * (columns, rows); signature kept unchanged.
     */
    private void addCenterImage(int rows, int columns) {
        SuperTile centerTile = findCenterTile(columns, rows);
        Image centerImage = new Image(getClass().getResourceAsStream(DEFAULT_CENTRAL_IMAGE));
        ImageView centerPatrick = new ImageView(centerImage);
        centerTile.addPieceImage(centerPatrick);
        centerTile.myPieceImage.setVisible(true);
    }

    /**
     * Returns the centre tile; the parameters are unused.
     * NOTE(review): centerX is passed as findTile's row argument, the
     * transpose of how highlightRange calls it — harmless on square grids,
     * but confirm on non-square grids.
     */
    private SuperTile findCenterTile(int rows, int columns) {
        SuperTile centerTile = findTile(centerX, centerY);
        return centerTile;
    }

    /** Replaces the stored range (offsets relative to the centre tile). */
    public void setRange(List<Point2D> range) {
        myRange = range;
    }

    /** Returns the stored range (offsets relative to the centre tile). */
    public List<Point2D> getRange() {
        return myRange;
    }
}
| src/authoring_environment/RangeGrid.java | package authoring_environment;
import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javafx.event.EventHandler;
import javafx.scene.image.Image;
import javafx.scene.input.MouseEvent;
/**
* The view of the grid especially for selecting the range.
* @author Mengen Huang
*
*/
public class RangeGrid extends SuperGrid{
private static final String DEFAULT_CENTRAL_IMAGE="/resources/images/Patrick.jpeg";
// Tile matrix taken from SuperGrid; findTile indexes the outer list by column.
private List<List<SuperTile>> rangeGrid;
private int myWidth;   // number of columns
private int myHeight;  // number of rows
private int centerX;   // column index of the centre tile
private int centerY;   // row index of the centre tile
private List<Point2D> myRange; // offsets relative to the centre tile
/**
 * Builds the range-selection grid: places the reference image on the centre
 * tile, pre-highlights the supplied range, and wires click-to-toggle.
 */
public RangeGrid(int columns, int rows, int tileSize, String shape, List<Point2D> range) {
    super(columns, rows, tileSize, shape);
    myRange=range;
    myWidth=columns;
    myHeight=rows;
    centerX=myWidth/2;
    centerY=myHeight/2;
    initGridTiles(shape);
    rangeGrid=super.myGrid;
    addCenterImage(columns, rows);
    highlightRange(range);
    addSelectAction();
}
/** Makes every tile toggle its highlight when clicked. */
private void addSelectAction() {
    for (List<SuperTile> row:rangeGrid){
        for (SuperTile tile:row){
            tile.setOnMouseClicked(new EventHandler<MouseEvent>() {
                @Override
                public void handle(MouseEvent event) {
                    swtichHighlight(tile);
                }
            });
        }
    }
}
/** Toggles a tile's selection. (Name is a typo for "switchHighlight".) */
private void swtichHighlight(SuperTile tile) {
    if (tile.ifSelected())
        tile.deselectTile();
    else
        tile.selectTile();
}
/**
 * Highlights every tile of the given range (offsets relative to the centre).
 * NOTE(review): the bounds checks use <= so an offset landing exactly on the
 * far edge maps to index myWidth/myHeight and will throw out-of-bounds.
 */
private void highlightRange(List<Point2D> range) {
    if ((range!=null) && (range.size()>0)){
        for (Point2D loc: range){
            int col=(int) (loc.getX()+centerX);
            int row=(int) (centerY-loc.getY());
            if ((col>=0) && (col<=myWidth) && (row>=0) && (row<=myHeight)){
                SuperTile tile=findTile(row,col);
                tile.selectTile();
            }
        }
    }
}
/** Looks up a tile; note the outer list is indexed by the column argument. */
private SuperTile findTile(int row, int col) {
    SuperTile tile=rangeGrid.get(col).get(row);
    return tile;
}
/**
 * Places the default image on the centre tile.
 * NOTE(review): both parameters are unused and the call site passes them
 * swapped (columns, rows).
 */
private void addCenterImage(int rows,int columns) {
    SuperTile centerTile=findCenterTile(columns,rows);
    Image centerImage=new Image(getClass().getResourceAsStream(DEFAULT_CENTRAL_IMAGE));
    centerTile.addPiece(centerImage);
    centerTile.myPieceImage.setVisible(true);
}
/** Returns the centre tile; the parameters are unused. */
private SuperTile findCenterTile(int rows,int columns) {
    SuperTile centerTile=findTile(centerX,centerY);
    return centerTile;
}
// private void showSelectedRange() {
// for (Point2D position:myRange){
// int x=(int) (position.getX()+centerX);
// int y=(int) (centerY-position.getY());
//// System.out.println(x);
//// System.out.println(y);
// if ((x<=centerX*2) && (y<=centerY*2)){
// sampleGrid.getTile(x,y).selecteTile(true);
// }
// }
// }
//
//
//
// /**
// * Collect all the coordination of selected tiles relative to the center tile
// * as Point2D in a list.
// * @return The list of relative coordination relative to the center tile.
// */
// public List<Point2D> rangeSelectedList(){
// List<Point2D> selectedList=new ArrayList<Point2D>();
//
// for (int i=0;i<sampleGrid.getGridWidth();i++) {
// for (int j=0;j<sampleGrid.getGridHeight();j++) {
// if(sampleGrid.getGridTiles()[i][j].getSelected()){
// selectedList.add(new Point2D.Double(i-centerX,centerY-j));
//// System.out.println((i-centerX)+","+(centerY-j));
// }
// }
// }
// myRange=selectedList;
//// test=4;
//// System.out.println("t="+ test);
// return selectedList;
//
// }
//
// public void rangeColumn(int column,boolean toChoose){
// for (int i=0;i<sampleGrid.getGridHeight();i++) {
// sampleGrid.getTile(centerX+column, i).selecteTile(toChoose);
// }
// }
//
// public void rangeRow(int row,boolean toChoose){
// for (int i=0;i<sampleGrid.getGridWidth();i++) {
// sampleGrid.getTile(i, centerY-row).selecteTile(toChoose);
// }
// }
//
// public void rangeRadius(int radius,boolean toChoose){
// for (int i=(centerX-radius);i<=(centerX+radius);i++){
// for (int j=(centerY-radius);j<=(centerY+radius);j++){
// sampleGrid.getTile(i, j).selecteTile(toChoose);
// }
// }
// }
//
// public void rangeAll(boolean toChoose) {
// for (int i=0;i<sampleGrid.getGridWidth();i++) {
// for (int j=0;j<sampleGrid.getGridHeight();j++) {
// sampleGrid.getTile(i, j).selecteTile(toChoose);
// }
// }
// }
//
// private void sampleSelected() {
// for (Tile[] line : sampleGrid.getGridTiles()) {
// for (Tile tile : line) {
// tile.setOnMouseClicked(new EventHandler<MouseEvent>() {
// @Override
// public void handle(MouseEvent event) {
// tile.switchSelected();
// }
// });
// }
// }
// }
public void setRange(List<Point2D> range){
myRange=range;
}
public List<Point2D> getRange(){
return myRange;
}
}
| chanaddPieceImage takes in ImageView so Patrck is in an ImageView now
| src/authoring_environment/RangeGrid.java | chanaddPieceImage takes in ImageView so Patrck is in an ImageView now |
|
Java | mit | 9d0f27d855079cd5df1a65fcd2c2bcfd6bb24f34 | 0 | taxi-wind/logbook,taxi-wind/logbook | package logbook.constants;
import java.io.File;
import java.net.URI;
import java.nio.charset.Charset;
import java.util.TimeZone;
import org.eclipse.swt.graphics.RGB;
/**
 * Common constants used throughout the application
 * (names, colors, thresholds, file paths, message templates).
 */
public class AppConstants {
/** Application name. */
public static final String NAME = "航海日誌";
/** Version string. */
public static final String VERSION = "1.3.7";
/** Home page. */
public static final URI HOME_PAGE_URI = URI.create("http://nekopanda.blog.jp/");
/** Endpoint polled for application updates. */
public static final URI UPDATE_CHECK_URI = URI
.create("https://googledrive.com/host/0B83tioTzFacPdnlSRW5vcEVFUFk/okversions.txt");
/** Date format (full timestamp). */
public static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
/** Date format (time only). */
public static final String DATE_SHORT_FORMAT = "HH:mm:ss";
/** Date format (date only). */
public static final String DATE_DAYS_FORMAT = "yyyy-MM-dd";
/** Date format (including milliseconds). */
public static final String DATE_LONG_FORMAT = "yyyy-MM-dd HH-mm-ss.SSS";
/** Time zone whose 00:00 corresponds to 05:00 JST, when quests reset. */
public static final TimeZone TIME_ZONE_MISSION = TimeZone.getTimeZone("GMT+04:00");
/** Date format used for battle log file names. */
public static final String BATTLE_LOGFILE_DATE_FORMAT = DATE_DAYS_FORMAT;
/** Condition (fatigue) red threshold. */
public static final int COND_RED = 20;
/** Condition (fatigue) orange threshold. */
public static final int COND_ORANGE = 30;
/** Condition (fatigue) green threshold. */
public static final int COND_GREEN = 50;
/** Condition (fatigue) yellow threshold. (Original comment said green — copy-paste slip.) */
public static final int COND_YELLOW = 75;
/** Expedition color. */
public static final RGB MISSION_COLOR = new RGB(102, 51, 255);
/** Repair dock color. */
public static final RGB NDOCK_COLOR = new RGB(0, 102, 153);
/** Fatigue red color. */
public static final RGB COND_RED_COLOR = new RGB(255, 16, 0);
/** Fatigue orange color. */
public static final RGB COND_ORANGE_COLOR = new RGB(255, 140, 0);
/** Fatigue green color. */
public static final RGB COND_GREEN_COLOR = new RGB(0, 128, 0);
/** Fatigue colors used in tables, highest condition band first. */
public static final RGB[] COND_TABLE_LOCOR = new RGB[] {
new RGB(122, 206, 255), // #7aceff (100-85)
new RGB(146, 255, 255), // #92ffff (84-76)
new RGB(137, 240, 171), // #89f0ab (75-63)
new RGB(170, 255, 143), // #aaff8f (62-50)
new RGB(228, 255, 220), // #e4ffdc (49-40)
new RGB(254, 207, 143), // #fecf8f (39-30)
new RGB(252, 137, 94), // #fc895e (29-20)
new RGB(253, 140, 143) // #fd8c8f (19-0)
};
// Lower bound of each condition band; parallels COND_TABLE_LOCOR above.
public static final int[] COND_TABLE = new int[] {
85,
76,
63,
50,
40,
30,
20,
0
};
// Series colors for the resource chart.
public static final RGB[] CHART_COLOR_TABLE = new RGB[] {
new RGB(0x00, 0x80, 0x00), // fuel
new RGB(0x66, 0x33, 0x00), // ammo
new RGB(0x80, 0x80, 0x80), // steel
new RGB(0xCC, 0x33, 0x00), // bauxite
new RGB(0xA5, 0x2A, 0x2A), // instant construction (burner)
new RGB(0xF0, 0x80, 0x80), // instant repair (bucket)
new RGB(0x48, 0x76, 0xFF), // development material
new RGB(0x00, 0xF5, 0xFF) // screws
};
/** Color for slightly damaged ships. */
public static final RGB SYOHA_SHIP_COLOR = new RGB(210, 255, 0);
/** Color for sunk ships. */
public static final RGB SUNK_SHIP_COLOR = new RGB(77, 166, 223);
/** Color for a lost battle. */
public static final RGB LOSE_BATTLE_COLOR = new RGB(230, 10, 20);
/** Color shown 5 minutes before completion. */
public static final RGB TIME_IN_5_MIN = new RGB(255, 215, 0);
/** Color shown 10 minutes before completion. */
public static final RGB TIME_IN_10_MIN = new RGB(255, 239, 153);
/** Color shown 20 minutes before completion. */
public static final RGB TIME_IN_20_MIN = new RGB(255, 247, 203);
/** Background color for even table rows. */
public static final RGB ROW_BACKGROUND = new RGB(246, 246, 246);
/** Slight damage threshold (75% HP). */
public static final float SLIGHT_DAMAGE = 0.75f;
/** Moderate damage threshold (50% HP). */
public static final float HALF_DAMAGE = 0.5f;
/** Heavy damage threshold (25% HP). */
public static final float BADLY_DAMAGE = 0.25f;
/** Supply low threshold. */
public static final float LOW_SUPPLY = 0.77f;
/** Supply empty threshold. */
public static final float EMPTY_SUPPLY = 0.33f;
/** Type IDs of aircraft equipment items. */
public static final int[] PLANE_ITEM_TYPES = new int[] { 6, 7, 8, 9, 10, 11, 25, 26 };
/** Character set (Shift_JIS, Windows code page MS932). */
public static final Charset CHARSET = Charset.forName("MS932");
/** Application settings file. */
public static final File APP_CONFIG_FILE = new File("./config/internal.xml");
/** Ship settings file. */
public static final File SHIP_CONFIG_FILE = new File("./config/ship.xml");
/** Item list settings file. */
public static final File ITEM_CONFIG_FILE = new File("./config/item.xml");
/** Item master settings file. */
public static final File ITEM_MST_CONFIG_FILE = new File("./config/itemmst.xml");
/** Master game data file. */
public static final File MASTER_DATA_CONFIG = new File("./config/master.xml");
/** Construction dock settings file. */
public static final File KDOCK_CONFIG_FILE = new File("./config/kdock.xml");
/** Ship group settings file. */
public static final File GROUP_CONFIG_FILE = new File("./config/group.xml");
/** Enemy data file. */
public static final File ENEMY_DATA_FILE = new File("./config/KCRDB-enemyid.csv");
/** CSS file for the sortie log view. */
public static final File BATTLE_LOG_CSS_FILE = new File("./templates/battle-log.css");
/** Material index: fuel. */
public static final int MATERIAL_FUEL = 1;
/** Material index: ammo. */
public static final int MATERIAL_AMMO = 2;
/** Material index: steel. */
public static final int MATERIAL_METAL = 3;
/** Material index: bauxite. */
public static final int MATERIAL_BAUXITE = 4;
/** Material index: instant construction (burner). */
public static final int MATERIAL_BURNER = 5;
/** Material index: instant repair (bucket). */
public static final int MATERIAL_BUCKET = 6;
/** Material index: development material. */
public static final int MATERIAL_RESEARCH = 7;
/** Material index: screws (improvement material). */
public static final int MATERIAL_SCREW = 8;
/** Maximum number of report entries kept in memory. */
public static final int MAX_LOG_SIZE = 5000;
// Use-item type IDs.
public static final int USEITEM_UNKNOWN = -1;
public static final int USEITEM_BUCKET = 1;
public static final int USEITEM_BURNER = 2;
public static final int USEITEM_RESEARCH = 3;
public static final int USEITEM_SCREW = 4;
/** /resources/icon/add.png */
public static final String R_ICON_ADD = "/resources/icon/add.png";
/** /resources/icon/delete.png */
public static final String R_ICON_DELETE = "/resources/icon/delete.png";
/** /resources/icon/error.png */
public static final String R_ICON_ERROR = "/resources/icon/error.png";
/** /resources/icon/exclamation.png */
public static final String R_ICON_EXCLAMATION = "/resources/icon/exclamation.png";
/** /resources/icon/folder_star.png */
public static final String R_ICON_FOLDER_STAR = "/resources/icon/folder_star.png";
/** /resources/icon/star.png */
public static final String R_ICON_STAR = "/resources/icon/star.png";
/** /resources/icon/heart.png */
public static final String R_ICON_LOCKED = "/resources/icon/heart.png";
/** /resources/hpgauge/0.png */
public static final String R_HPGAUGE_0 = "/resources/hpgauge/0.png";
/** /resources/hpgauge/1.png */
public static final String R_HPGAUGE_1 = "/resources/hpgauge/1.png";
/** /resources/hpgauge/2.png */
public static final String R_HPGAUGE_2 = "/resources/hpgauge/2.png";
/** /resources/hpgauge/3.png */
public static final String R_HPGAUGE_3 = "/resources/hpgauge/3.png";
/** /resources/hpgauge/4.png */
public static final String R_HPGAUGE_4 = "/resources/hpgauge/4.png";
/** /resources/hpgauge/5.png */
public static final String R_HPGAUGE_5 = "/resources/hpgauge/5.png";
/** /resources/hpgauge/6.png */
public static final String R_HPGAUGE_6 = "/resources/hpgauge/6.png";
/** /resources/hpgauge/7.png */
public static final String R_HPGAUGE_7 = "/resources/hpgauge/7.png";
/** /resources/hpgauge/8.png */
public static final String R_HPGAUGE_8 = "/resources/hpgauge/8.png";
/** /resources/hpgauge/9.png */
public static final String R_HPGAUGE_9 = "/resources/hpgauge/9.png";
/** /resources/hpgauge/10.png */
public static final String R_HPGAUGE_10 = "/resources/hpgauge/10.png";
/** /resources/hpgauge/11.png */
public static final String R_HPGAUGE_11 = "/resources/hpgauge/11.png";
/** /resources/hpgauge/12.png */
public static final String R_HPGAUGE_12 = "/resources/hpgauge/12.png";
/** /resources/hpgauge/13.png */
public static final String R_HPGAUGE_13 = "/resources/hpgauge/13.png";
/** /resources/hpgauge/14.png */
public static final String R_HPGAUGE_14 = "/resources/hpgauge/14.png";
/** /resources/hpgauge/15.png */
public static final String R_HPGAUGE_15 = "/resources/hpgauge/15.png";
/** /resources/hpgauge/16.png */
public static final String R_HPGAUGE_16 = "/resources/hpgauge/16.png";
/** /resources/hpgauge/17.png */
public static final String R_HPGAUGE_17 = "/resources/hpgauge/17.png";
/** /resources/hpgauge/18.png */
public static final String R_HPGAUGE_18 = "/resources/hpgauge/18.png";
/** /resources/hpgauge/19.png */
public static final String R_HPGAUGE_19 = "/resources/hpgauge/19.png";
/** /resources/hpgauge/20.png */
public static final String R_HPGAUGE_20 = "/resources/hpgauge/20.png";
/** /resources/hpgauge/21.png */
public static final String R_HPGAUGE_21 = "/resources/hpgauge/21.png";
/** /resources/hpgauge/22.png */
public static final String R_HPGAUGE_22 = "/resources/hpgauge/22.png";
/** /resources/hpgauge/23.png */
public static final String R_HPGAUGE_23 = "/resources/hpgauge/23.png";
/** /resources/hpgauge/24.png */
public static final String R_HPGAUGE_24 = "/resources/hpgauge/24.png";
/** /resources/hpgauge/25.png */
public static final String R_HPGAUGE_25 = "/resources/hpgauge/25.png";
/** /resources/hpgauge/26.png */
public static final String R_HPGAUGE_26 = "/resources/hpgauge/26.png";
/** /resources/hpgauge/27.png */
public static final String R_HPGAUGE_27 = "/resources/hpgauge/27.png";
/** /resources/hpgauge/28.png */
public static final String R_HPGAUGE_28 = "/resources/hpgauge/28.png";
/** /resources/hpgauge/29.png */
public static final String R_HPGAUGE_29 = "/resources/hpgauge/29.png";
/** /resources/hpgauge/30.png */
public static final String R_HPGAUGE_30 = "/resources/hpgauge/30.png";
/** /resources/hpgauge/31.png */
public static final String R_HPGAUGE_31 = "/resources/hpgauge/31.png";
/** /resources/hpgauge/32.png */
public static final String R_HPGAUGE_32 = "/resources/hpgauge/32.png";
/** /resources/hpgauge/33.png */
public static final String R_HPGAUGE_33 = "/resources/hpgauge/33.png";
/** /resources/hpgauge/34.png */
public static final String R_HPGAUGE_34 = "/resources/hpgauge/34.png";
/** /resources/hpgauge/35.png */
public static final String R_HPGAUGE_35 = "/resources/hpgauge/35.png";
/** /resources/hpgauge/36.png */
public static final String R_HPGAUGE_36 = "/resources/hpgauge/36.png";
/** /resources/hpgauge/37.png */
public static final String R_HPGAUGE_37 = "/resources/hpgauge/37.png";
/** /resources/hpgauge/38.png */
public static final String R_HPGAUGE_38 = "/resources/hpgauge/38.png";
/** /resources/hpgauge/39.png */
public static final String R_HPGAUGE_39 = "/resources/hpgauge/39.png";
/** /resources/hpgauge/40.png */
public static final String R_HPGAUGE_40 = "/resources/hpgauge/40.png";
/** /resources/hpgauge/41.png */
public static final String R_HPGAUGE_41 = "/resources/hpgauge/41.png";
/** /resources/hpgauge/42.png */
public static final String R_HPGAUGE_42 = "/resources/hpgauge/42.png";
/** /resources/hpgauge/43.png */
public static final String R_HPGAUGE_43 = "/resources/hpgauge/43.png";
/** /resources/hpgauge/44.png */
public static final String R_HPGAUGE_44 = "/resources/hpgauge/44.png";
/** /resources/hpgauge/45.png */
public static final String R_HPGAUGE_45 = "/resources/hpgauge/45.png";
/** /resources/hpgauge/46.png */
public static final String R_HPGAUGE_46 = "/resources/hpgauge/46.png";
/** /resources/hpgauge/47.png */
public static final String R_HPGAUGE_47 = "/resources/hpgauge/47.png";
/** /resources/hpgauge/48.png */
public static final String R_HPGAUGE_48 = "/resources/hpgauge/48.png";
/** /resources/hpgauge/49.png */
public static final String R_HPGAUGE_49 = "/resources/hpgauge/49.png";
/** /resources/hpgauge/50.png */
public static final String R_HPGAUGE_50 = "/resources/hpgauge/50.png";
/** Application logo. */
public static final String LOGO = "/resources/logo.png";
/** HP gauge images indexed by remaining-HP fraction (0-50). */
public static final String[] R_HPGAUGE_IMAGES = { AppConstants.R_HPGAUGE_0, AppConstants.R_HPGAUGE_1,
AppConstants.R_HPGAUGE_2, AppConstants.R_HPGAUGE_3, AppConstants.R_HPGAUGE_4, AppConstants.R_HPGAUGE_5,
AppConstants.R_HPGAUGE_6, AppConstants.R_HPGAUGE_7, AppConstants.R_HPGAUGE_8, AppConstants.R_HPGAUGE_9,
AppConstants.R_HPGAUGE_10, AppConstants.R_HPGAUGE_11, AppConstants.R_HPGAUGE_12, AppConstants.R_HPGAUGE_13,
AppConstants.R_HPGAUGE_14, AppConstants.R_HPGAUGE_15, AppConstants.R_HPGAUGE_16, AppConstants.R_HPGAUGE_17,
AppConstants.R_HPGAUGE_18, AppConstants.R_HPGAUGE_19, AppConstants.R_HPGAUGE_20, AppConstants.R_HPGAUGE_21,
AppConstants.R_HPGAUGE_22, AppConstants.R_HPGAUGE_23, AppConstants.R_HPGAUGE_24, AppConstants.R_HPGAUGE_25,
AppConstants.R_HPGAUGE_26, AppConstants.R_HPGAUGE_27, AppConstants.R_HPGAUGE_28, AppConstants.R_HPGAUGE_29,
AppConstants.R_HPGAUGE_30, AppConstants.R_HPGAUGE_31, AppConstants.R_HPGAUGE_32, AppConstants.R_HPGAUGE_33,
AppConstants.R_HPGAUGE_34, AppConstants.R_HPGAUGE_35, AppConstants.R_HPGAUGE_36, AppConstants.R_HPGAUGE_37,
AppConstants.R_HPGAUGE_38, AppConstants.R_HPGAUGE_39, AppConstants.R_HPGAUGE_40, AppConstants.R_HPGAUGE_41,
AppConstants.R_HPGAUGE_42, AppConstants.R_HPGAUGE_43, AppConstants.R_HPGAUGE_44, AppConstants.R_HPGAUGE_45,
AppConstants.R_HPGAUGE_46, AppConstants.R_HPGAUGE_47, AppConstants.R_HPGAUGE_48, AppConstants.R_HPGAUGE_49,
AppConstants.R_HPGAUGE_50 };
/** Tooltip text for ship labels on the fleet tab. */
public static final String TOOLTIP_FLEETTAB_SHIP = "HP:{0}/{1} 燃料:{2}/{3} 弾:{4}/{5}\nNext:{6}exp";
/** Message: ready to sortie. */
public static final String MESSAGE_GOOD = "出撃できます。";
/** Message: can advance. */
public static final String MESSAGE_GO_NEXT = "進撃できます。";
/** Message: {0} cannot sortie. */
public static final String MESSAGE_BAD = "{0} 出撃はできません。";
/** Message: currently on sortie. */
public static final String MESSAGE_SORTIE = "出撃中です。";
/** Message fragment: in the combined fleet. */
public static final String MESSAGE_IN_COMBINED = "連合艦隊に";
/** Message: a ship is heavily damaged. */
public static final String MESSAGE_BADLY_DAMAGE = "大破している艦娘がいます";
/** Message: a ship is docked for repair. */
public static final String MESSAGE_BATHWATER = "入渠中の艦娘がいます";
/** Message: on expedition. */
public static final String MESSAGE_MISSION = "遠征中です。";
/** Message: fatigued ships, recovering around {0}. */
public static final String MESSAGE_COND = "疲労している艦娘がいます {0}頃に回復します。";
/** Message: heavily damaged ships, cannot advance. */
public static final String MESSAGE_STOP_SORTIE = "大破している艦娘がいます、進撃はできません。";
/** Message: combined fleet formed. */
public static final String MESSAGE_COMBINED = "連合艦隊編成中。";
/** Message: air superiority value {0}. */
public static final String MESSAGE_SEIKU = "制空値:{0}。";
/** Message: line-of-sight (search) value {0}. */
public static final String MESSAGE_SAKUTEKI = "索敵:{0}。";
/** Message: fleet total level {0}. */
public static final String MESSAGE_TOTAL_LV = "艦隊合計Lv:{0}。";
/** Application name used for push notifications. */
public static final String PUSH_NOTIFY_APPNAME = "航海日誌";
/** Prowl endpoint URI. */
public static final String PUSH_NOTIFY_PROWL_URI = "https://api.prowlapp.com/publicapi/add";
/** NotifyMyAndroid endpoint URI. */
public static final String PUSH_NOTIFY_NMA_URI = "https://www.notifymyandroid.com/publicapi/notify";
/** im.kayac.com endpoint URI. */
public static final String PUSH_NOTIFY_IMKAYAC_URI = "http://im.kayac.com/api/post/";
/** Message: drum canisters {0} ({1} ships). */
public static final String MESSAGE_TOTAL_DRAM = "ドラム缶:{0} ({1}隻)。";
/** Message: landing craft {0} (+{1}%). */
public static final String MESSAGE_TOTAL_DAIHATSU = "大発:{0} (+{1}%)";
/** Default text shown in the title bar. */
public static final String TITLEBAR_TEXT = "航海日誌拡張版 " + VERSION;
/** Battle result / drop report CSV file name. */
public static final String LOG_BATTLE_RESULT = "海戦・ドロップ報告書.csv";
/** Alternative battle result / drop report CSV file name. */
public static final String LOG_BATTLE_RESULT_ALT = "海戦・ドロップ報告書_alternativefile.csv";
/** Ship construction report CSV file name. */
public static final String LOG_CREATE_SHIP = "建造報告書.csv";
/** Alternative ship construction report CSV file name. */
public static final String LOG_CREATE_SHIP_ALT = "建造報告書_alternativefile.csv";
/** Item development report CSV file name. */
public static final String LOG_CREATE_ITEM = "開発報告書.csv";
/** Alternative item development report CSV file name. */
public static final String LOG_CREATE_ITEM_ALT = "開発報告書_alternativefile.csv";
/** Expedition report CSV file name. */
public static final String LOG_MISSION = "遠征報告書.csv";
/** Alternative expedition report CSV file name. (Original comment duplicated the non-ALT name.) */
public static final String LOG_MISSION_ALT = "遠征報告書_alternativefile.csv";
/** Resource log CSV file name. */
public static final String LOG_RESOURCE = "資材ログ.csv";
/** Alternative resource log CSV file name. */
public static final String LOG_RESOURCE_ALT = "資材ログ_alternativefile.csv";
} | main/logbook/constants/AppConstants.java | package logbook.constants;
import java.io.File;
import java.net.URI;
import java.nio.charset.Charset;
import java.util.TimeZone;
import org.eclipse.swt.graphics.RGB;
/**
* アプリケーションで使用する共通の定数クラス
*
*/
public class AppConstants {
/** アプリケーション名 */
public static final String NAME = "航海日誌";
/** バージョン */
public static final String VERSION = "1.3.6";
/** ホームページ */
public static final URI HOME_PAGE_URI = URI.create("http://nekopanda.blog.jp/");
/** アップデートチェック先 */
public static final URI UPDATE_CHECK_URI = URI
.create("https://googledrive.com/host/0B83tioTzFacPdnlSRW5vcEVFUFk/okversions.txt");
/** 日付書式 */
public static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
/** 日付書式(時刻のみ) */
public static final String DATE_SHORT_FORMAT = "HH:mm:ss";
/** 日付書式(日付のみ) */
public static final String DATE_DAYS_FORMAT = "yyyy-MM-dd";
/** 日付書式(ミリ秒を含む) */
public static final String DATE_LONG_FORMAT = "yyyy-MM-dd HH-mm-ss.SSS";
/** タイムゾーン(任務が更新される05:00JSTに0:00になるタイムゾーン) */
public static final TimeZone TIME_ZONE_MISSION = TimeZone.getTimeZone("GMT+04:00");
/** 戦闘ログファイルの名前 */
public static final String BATTLE_LOGFILE_DATE_FORMAT = DATE_DAYS_FORMAT;
/** 疲労赤色 */
public static final int COND_RED = 20;
/** 疲労オレンジ色 */
public static final int COND_ORANGE = 30;
/** 疲労緑色 */
public static final int COND_GREEN = 50;
/** 疲労緑色 */
public static final int COND_YELLOW = 75;
/** 遠征色 */
public static final RGB MISSION_COLOR = new RGB(102, 51, 255);
/** 入渠色 */
public static final RGB NDOCK_COLOR = new RGB(0, 102, 153);
/** 疲労赤色 */
public static final RGB COND_RED_COLOR = new RGB(255, 16, 0);
/** 疲労オレンジ色 */
public static final RGB COND_ORANGE_COLOR = new RGB(255, 140, 0);
/** 疲労緑色 */
public static final RGB COND_GREEN_COLOR = new RGB(0, 128, 0);
/** テーブルの疲労度色 */
public static final RGB[] COND_TABLE_LOCOR = new RGB[] {
new RGB(122, 206, 255), // #7aceff (100-85)
new RGB(146, 255, 255), // #92ffff (84-76)
new RGB(137, 240, 171), // #89f0ab (75-63)
new RGB(170, 255, 143), // #aaff8f (62-50)
new RGB(228, 255, 220), // #e4ffdc (49-40)
new RGB(254, 207, 143), // #fecf8f (39-30)
new RGB(252, 137, 94), // #fc895e (29-20)
new RGB(253, 140, 143) // #fd8c8f (19-0)
};
public static final int[] COND_TABLE = new int[] {
85,
76,
63,
50,
40,
30,
20,
0
};
public static final RGB[] CHART_COLOR_TABLE = new RGB[] {
new RGB(0x00, 0x80, 0x00), // 燃料
new RGB(0x66, 0x33, 0x00), // 弾薬
new RGB(0x80, 0x80, 0x80), // 鋼材
new RGB(0xCC, 0x33, 0x00), // ボーキ
new RGB(0xA5, 0x2A, 0x2A), // バーナー
new RGB(0xF0, 0x80, 0x80), // バケツ
new RGB(0x48, 0x76, 0xFF), // 開発
new RGB(0x00, 0xF5, 0xFF) // ネジ
};
/** 小破の色 */
public static final RGB SYOHA_SHIP_COLOR = new RGB(210, 255, 0);
/** 轟沈の色 */
public static final RGB SUNK_SHIP_COLOR = new RGB(77, 166, 223);
/** 敗北の色 */
public static final RGB LOSE_BATTLE_COLOR = new RGB(230, 10, 20);
/** 5分前 */
public static final RGB TIME_IN_5_MIN = new RGB(255, 215, 0);
/** 10分前 */
public static final RGB TIME_IN_10_MIN = new RGB(255, 239, 153);
/** 20分前 */
public static final RGB TIME_IN_20_MIN = new RGB(255, 247, 203);
/** テーブル行(偶数行)背景色 */
public static final RGB ROW_BACKGROUND = new RGB(246, 246, 246);
/** 小破(75%) */
public static final float SLIGHT_DAMAGE = 0.75f;
/** 中破(50%) */
public static final float HALF_DAMAGE = 0.5f;
/** 大破(25%) */
public static final float BADLY_DAMAGE = 0.25f;
/** 補給(少) */
public static final float LOW_SUPPLY = 0.77f;
/** 補給(空) */
public static final float EMPTY_SUPPLY = 0.33f;
/** 艦載機装備アイテムのタイプID */
public static final int[] PLANE_ITEM_TYPES = new int[] { 6, 7, 8, 9, 10, 11, 25, 26 };
/** 文字コード(Shift_JIS) */
public static final Charset CHARSET = Charset.forName("MS932");
/** アプリケーション設定ファイル */
public static final File APP_CONFIG_FILE = new File("./config/internal.xml");
/** 艦娘設定ファイル */
public static final File SHIP_CONFIG_FILE = new File("./config/ship.xml");
/** 装備一覧設定ファイル */
public static final File ITEM_CONFIG_FILE = new File("./config/item.xml");
/** 装備マスター設定ファイル */
public static final File ITEM_MST_CONFIG_FILE = new File("./config/itemmst.xml");
/** マスターゲームデータ保存ファイル */
public static final File MASTER_DATA_CONFIG = new File("./config/master.xml");
/** 建造ドック設定ファイル */
public static final File KDOCK_CONFIG_FILE = new File("./config/kdock.xml");
/** 所有艦娘グループ設定ファイル */
public static final File GROUP_CONFIG_FILE = new File("./config/group.xml");
/** 敵データファイル */
public static final File ENEMY_DATA_FILE = new File("./config/KCRDB-enemyid.csv");
/** 出撃ログ表示用CSSファイル */
public static final File BATTLE_LOG_CSS_FILE = new File("./templates/battle-log.css");
/** 保有資材:燃料 */
public static final int MATERIAL_FUEL = 1;
/** 保有資材:弾薬 */
public static final int MATERIAL_AMMO = 2;
/** 保有資材:鋼材 */
public static final int MATERIAL_METAL = 3;
/** 保有資材:ボーキサイト */
public static final int MATERIAL_BAUXITE = 4;
/** 保有資材:バーナー */
public static final int MATERIAL_BURNER = 5;
/** 保有資材:高速修復材 */
public static final int MATERIAL_BUCKET = 6;
/** 保有資材:開発資材 */
public static final int MATERIAL_RESEARCH = 7;
/** 保有資材:ネジ */
public static final int MATERIAL_SCREW = 8;
/** 報告書のオンメモリ記憶数 */
public static final int MAX_LOG_SIZE = 5000;
public static final int USEITEM_UNKNOWN = -1;
public static final int USEITEM_BUCKET = 1;
public static final int USEITEM_BURNER = 2;
public static final int USEITEM_RESEARCH = 3;
public static final int USEITEM_SCREW = 4;
/** /resources/icon/add.png */
public static final String R_ICON_ADD = "/resources/icon/add.png";
/** /resources/icon/delete.png */
public static final String R_ICON_DELETE = "/resources/icon/delete.png";
/** /resources/icon/error.png */
public static final String R_ICON_ERROR = "/resources/icon/error.png";
/** /resources/icon/exclamation.png */
public static final String R_ICON_EXCLAMATION = "/resources/icon/exclamation.png";
/** /resources/icon/folder_star.png */
public static final String R_ICON_FOLDER_STAR = "/resources/icon/folder_star.png";
/** /resources/icon/star.png */
public static final String R_ICON_STAR = "/resources/icon/star.png";
/** /resources/icon/heart.png */
public static final String R_ICON_LOCKED = "/resources/icon/heart.png";
/** /resources/hpgauge/0.png */
public static final String R_HPGAUGE_0 = "/resources/hpgauge/0.png";
/** /resources/hpgauge/1.png */
public static final String R_HPGAUGE_1 = "/resources/hpgauge/1.png";
/** /resources/hpgauge/2.png */
public static final String R_HPGAUGE_2 = "/resources/hpgauge/2.png";
/** /resources/hpgauge/3.png */
public static final String R_HPGAUGE_3 = "/resources/hpgauge/3.png";
/** /resources/hpgauge/4.png */
public static final String R_HPGAUGE_4 = "/resources/hpgauge/4.png";
/** /resources/hpgauge/5.png */
public static final String R_HPGAUGE_5 = "/resources/hpgauge/5.png";
/** /resources/hpgauge/6.png */
public static final String R_HPGAUGE_6 = "/resources/hpgauge/6.png";
/** /resources/hpgauge/7.png */
public static final String R_HPGAUGE_7 = "/resources/hpgauge/7.png";
/** /resources/hpgauge/8.png */
public static final String R_HPGAUGE_8 = "/resources/hpgauge/8.png";
/** /resources/hpgauge/9.png */
public static final String R_HPGAUGE_9 = "/resources/hpgauge/9.png";
/** /resources/hpgauge/10.png */
public static final String R_HPGAUGE_10 = "/resources/hpgauge/10.png";
/** /resources/hpgauge/11.png */
public static final String R_HPGAUGE_11 = "/resources/hpgauge/11.png";
/** /resources/hpgauge/12.png */
public static final String R_HPGAUGE_12 = "/resources/hpgauge/12.png";
/** /resources/hpgauge/13.png */
public static final String R_HPGAUGE_13 = "/resources/hpgauge/13.png";
/** /resources/hpgauge/14.png */
public static final String R_HPGAUGE_14 = "/resources/hpgauge/14.png";
/** /resources/hpgauge/15.png */
public static final String R_HPGAUGE_15 = "/resources/hpgauge/15.png";
/** /resources/hpgauge/16.png */
public static final String R_HPGAUGE_16 = "/resources/hpgauge/16.png";
/** /resources/hpgauge/17.png */
public static final String R_HPGAUGE_17 = "/resources/hpgauge/17.png";
/** /resources/hpgauge/18.png */
public static final String R_HPGAUGE_18 = "/resources/hpgauge/18.png";
/** /resources/hpgauge/19.png */
public static final String R_HPGAUGE_19 = "/resources/hpgauge/19.png";
/** /resources/hpgauge/20.png */
public static final String R_HPGAUGE_20 = "/resources/hpgauge/20.png";
/** /resources/hpgauge/21.png */
public static final String R_HPGAUGE_21 = "/resources/hpgauge/21.png";
/** /resources/hpgauge/22.png */
public static final String R_HPGAUGE_22 = "/resources/hpgauge/22.png";
/** /resources/hpgauge/23.png */
public static final String R_HPGAUGE_23 = "/resources/hpgauge/23.png";
/** /resources/hpgauge/24.png */
public static final String R_HPGAUGE_24 = "/resources/hpgauge/24.png";
/** /resources/hpgauge/25.png */
public static final String R_HPGAUGE_25 = "/resources/hpgauge/25.png";
/** /resources/hpgauge/26.png */
public static final String R_HPGAUGE_26 = "/resources/hpgauge/26.png";
/** /resources/hpgauge/27.png */
public static final String R_HPGAUGE_27 = "/resources/hpgauge/27.png";
/** /resources/hpgauge/28.png */
public static final String R_HPGAUGE_28 = "/resources/hpgauge/28.png";
/** /resources/hpgauge/29.png */
public static final String R_HPGAUGE_29 = "/resources/hpgauge/29.png";
/** /resources/hpgauge/30.png */
public static final String R_HPGAUGE_30 = "/resources/hpgauge/30.png";
/** /resources/hpgauge/31.png */
public static final String R_HPGAUGE_31 = "/resources/hpgauge/31.png";
/** /resources/hpgauge/32.png */
public static final String R_HPGAUGE_32 = "/resources/hpgauge/32.png";
/** /resources/hpgauge/33.png */
public static final String R_HPGAUGE_33 = "/resources/hpgauge/33.png";
/** /resources/hpgauge/34.png */
public static final String R_HPGAUGE_34 = "/resources/hpgauge/34.png";
/** /resources/hpgauge/35.png */
public static final String R_HPGAUGE_35 = "/resources/hpgauge/35.png";
/** /resources/hpgauge/36.png */
public static final String R_HPGAUGE_36 = "/resources/hpgauge/36.png";
/** /resources/hpgauge/37.png */
public static final String R_HPGAUGE_37 = "/resources/hpgauge/37.png";
/** /resources/hpgauge/38.png */
public static final String R_HPGAUGE_38 = "/resources/hpgauge/38.png";
/** /resources/hpgauge/39.png */
public static final String R_HPGAUGE_39 = "/resources/hpgauge/39.png";
/** /resources/hpgauge/40.png */
public static final String R_HPGAUGE_40 = "/resources/hpgauge/40.png";
/** /resources/hpgauge/41.png */
public static final String R_HPGAUGE_41 = "/resources/hpgauge/41.png";
/** /resources/hpgauge/42.png */
public static final String R_HPGAUGE_42 = "/resources/hpgauge/42.png";
/** /resources/hpgauge/43.png */
public static final String R_HPGAUGE_43 = "/resources/hpgauge/43.png";
/** /resources/hpgauge/44.png */
public static final String R_HPGAUGE_44 = "/resources/hpgauge/44.png";
/** /resources/hpgauge/45.png */
public static final String R_HPGAUGE_45 = "/resources/hpgauge/45.png";
/** /resources/hpgauge/46.png */
public static final String R_HPGAUGE_46 = "/resources/hpgauge/46.png";
/** /resources/hpgauge/47.png */
public static final String R_HPGAUGE_47 = "/resources/hpgauge/47.png";
/** /resources/hpgauge/48.png */
public static final String R_HPGAUGE_48 = "/resources/hpgauge/48.png";
/** /resources/hpgauge/49.png */
public static final String R_HPGAUGE_49 = "/resources/hpgauge/49.png";
/** /resources/hpgauge/50.png */
public static final String R_HPGAUGE_50 = "/resources/hpgauge/50.png";
/** 航海日誌のロゴ */
public static final String LOGO = "/resources/logo.png";
/** HPゲージイメージ */
public static final String[] R_HPGAUGE_IMAGES = { AppConstants.R_HPGAUGE_0, AppConstants.R_HPGAUGE_1,
AppConstants.R_HPGAUGE_2, AppConstants.R_HPGAUGE_3, AppConstants.R_HPGAUGE_4, AppConstants.R_HPGAUGE_5,
AppConstants.R_HPGAUGE_6, AppConstants.R_HPGAUGE_7, AppConstants.R_HPGAUGE_8, AppConstants.R_HPGAUGE_9,
AppConstants.R_HPGAUGE_10, AppConstants.R_HPGAUGE_11, AppConstants.R_HPGAUGE_12, AppConstants.R_HPGAUGE_13,
AppConstants.R_HPGAUGE_14, AppConstants.R_HPGAUGE_15, AppConstants.R_HPGAUGE_16, AppConstants.R_HPGAUGE_17,
AppConstants.R_HPGAUGE_18, AppConstants.R_HPGAUGE_19, AppConstants.R_HPGAUGE_20, AppConstants.R_HPGAUGE_21,
AppConstants.R_HPGAUGE_22, AppConstants.R_HPGAUGE_23, AppConstants.R_HPGAUGE_24, AppConstants.R_HPGAUGE_25,
AppConstants.R_HPGAUGE_26, AppConstants.R_HPGAUGE_27, AppConstants.R_HPGAUGE_28, AppConstants.R_HPGAUGE_29,
AppConstants.R_HPGAUGE_30, AppConstants.R_HPGAUGE_31, AppConstants.R_HPGAUGE_32, AppConstants.R_HPGAUGE_33,
AppConstants.R_HPGAUGE_34, AppConstants.R_HPGAUGE_35, AppConstants.R_HPGAUGE_36, AppConstants.R_HPGAUGE_37,
AppConstants.R_HPGAUGE_38, AppConstants.R_HPGAUGE_39, AppConstants.R_HPGAUGE_40, AppConstants.R_HPGAUGE_41,
AppConstants.R_HPGAUGE_42, AppConstants.R_HPGAUGE_43, AppConstants.R_HPGAUGE_44, AppConstants.R_HPGAUGE_45,
AppConstants.R_HPGAUGE_46, AppConstants.R_HPGAUGE_47, AppConstants.R_HPGAUGE_48, AppConstants.R_HPGAUGE_49,
AppConstants.R_HPGAUGE_50 };
/** 艦隊タブの艦娘ラベルに設定するツールチップテキスト */
public static final String TOOLTIP_FLEETTAB_SHIP = "HP:{0}/{1} 燃料:{2}/{3} 弾:{4}/{5}\nNext:{6}exp";
/** メッセージ 出撃できます。 */
public static final String MESSAGE_GOOD = "出撃できます。";
/** メッセージ 進撃できます。 */
public static final String MESSAGE_GO_NEXT = "進撃できます。";
/** メッセージ {0} 出撃はできません。 */
public static final String MESSAGE_BAD = "{0} 出撃はできません。";
/** メッセージ 出撃中です。 */
public static final String MESSAGE_SORTIE = "出撃中です。";
/** メッセージ 連合艦隊に */
public static final String MESSAGE_IN_COMBINED = "連合艦隊に";
/** メッセージ 大破している艦娘がいます */
public static final String MESSAGE_BADLY_DAMAGE = "大破している艦娘がいます";
/** メッセージ 入渠中の艦娘がいます */
public static final String MESSAGE_BATHWATER = "入渠中の艦娘がいます";
/** メッセージ 遠征中です。 */
public static final String MESSAGE_MISSION = "遠征中です。";
/** メッセージ 疲労している艦娘がいます */
public static final String MESSAGE_COND = "疲労している艦娘がいます {0}頃に回復します。";
/** メッセージ 大破している艦娘がいます */
public static final String MESSAGE_STOP_SORTIE = "大破している艦娘がいます、進撃はできません。";
/** メッセージ 連合艦隊 */
public static final String MESSAGE_COMBINED = "連合艦隊編成中。";
/** メッセージ 制空値:{0} */
public static final String MESSAGE_SEIKU = "制空値:{0}。";
/** メッセージ 索敵:{0}+{1} */
public static final String MESSAGE_SAKUTEKI = "索敵:{0}。";
/** メッセージ 艦隊合計Lv:{0} */
public static final String MESSAGE_TOTAL_LV = "艦隊合計Lv:{0}。";
/** Push 通知のアプリケーション名*/
public static final String PUSH_NOTIFY_APPNAME = "航海日誌";
/** Prowl のアクセス先 URI */
public static final String PUSH_NOTIFY_PROWL_URI = "https://api.prowlapp.com/publicapi/add";
/** NMA のアクセス先 URI */
public static final String PUSH_NOTIFY_NMA_URI = "https://www.notifymyandroid.com/publicapi/notify";
/** im.kayac.com のアクセス先 URI */
public static final String PUSH_NOTIFY_IMKAYAC_URI = "http://im.kayac.com/api/post/";
/** メッセージ ドラム缶:{0} ({1}隻) */
public static final String MESSAGE_TOTAL_DRAM = "ドラム缶:{0} ({1}隻)。";
/** メッセージ 大発:{0} (+{1}%) */
public static final String MESSAGE_TOTAL_DAIHATSU = "大発:{0} (+{1}%)";
/** タイトルバーに表示するデフォルトテキスト */
public static final String TITLEBAR_TEXT = "航海日誌拡張版 " + VERSION;
/** 海戦・ドロップ報告書.csv */
public static final String LOG_BATTLE_RESULT = "海戦・ドロップ報告書.csv";
/** 海戦・ドロップ報告書_alternativefile.csv */
public static final String LOG_BATTLE_RESULT_ALT = "海戦・ドロップ報告書_alternativefile.csv";
/** 建造報告書.csv */
public static final String LOG_CREATE_SHIP = "建造報告書.csv";
/** 建造報告書_alternativefile.csv */
public static final String LOG_CREATE_SHIP_ALT = "建造報告書_alternativefile.csv";
/** 開発報告書.csv */
public static final String LOG_CREATE_ITEM = "開発報告書.csv";
/** 開発報告書_alternativefile.csv */
public static final String LOG_CREATE_ITEM_ALT = "開発報告書_alternativefile.csv";
/** 遠征報告書.csv */
public static final String LOG_MISSION = "遠征報告書.csv";
/** 遠征報告書_alternativefile.csv */
public static final String LOG_MISSION_ALT = "遠征報告書_alternativefile.csv";
/** 資材ログ.csv */
public static final String LOG_RESOURCE = "資材ログ.csv";
/** 資材ログ_alternativefile.csv */
public static final String LOG_RESOURCE_ALT = "資材ログ_alternativefile.csv";
} | ver.1.3.7
| main/logbook/constants/AppConstants.java | ver.1.3.7 |
|
Java | mit | 1177a1027a5d1d7b0ebb8e4fe4f93b4792da60d5 | 0 | mnicky/bible4j | package com.github.mnicky.bible4j.data;
import static org.testng.Assert.assertEquals;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.github.mnicky.bible4j.data.BibleBook;
import com.github.mnicky.bible4j.data.Note.NoteType;
import com.github.mnicky.bible4j.data.Position;
/**
 * Unit tests for the Note class.
 */
public final class Note_Test {

    // Fixtures rebuilt before each test: n2 and n3 are equal to each other,
    // n1 differs from them by note type, n4 differs by position.
    private Note n1, n2, n3, n4;

    @BeforeMethod
    public void beforeMethod() {
        n1 = new Note("some note text", new Position(BibleBook.ACTS, 2, 8), NoteType.COMMENTARY);
        n2 = new Note("some note text", new Position(BibleBook.ACTS, 2, 8), NoteType.USER_NOTE);
        n3 = new Note("some note text", new Position(BibleBook.ACTS, 2, 8), NoteType.USER_NOTE);
        n4 = new Note("some note text", new Position(BibleBook.ACTS, 2, 9), NoteType.USER_NOTE);
    }

    @Test
    public void testToString() {
        assertEquals(n1.toString(), "some note text - ACTS 2,8 (COMMENTARY)");
    }

    // TODO add more equals() tests

    @Test
    public void testEqualsForTrue() {
        assertEquals(n3.equals(n2), true);
    }

    @Test
    public void testEqualsForFalse() {
        assertEquals(n1.equals(n2), false);
    }

    @Test
    public void testHashCodeForConsistency() {
        // Equal notes must produce equal hash codes.
        assertEquals(n2.hashCode() == n3.hashCode(), true);
    }

    @Test
    public void testHashCodeForConsistencyWithEqualsForTrue() {
        assertEquals(n2.hashCode() == n3.hashCode(), n2.equals(n3));
    }

    @Test
    public void testHashCodeForConsistencyWithEqualsForFalse() {
        assertEquals(n1.hashCode() == n3.hashCode(), n1.equals(n3));
    }

    @Test
    public void compareToShouldCompareNotesByTheirPositions() {
        // Ordering is driven by the note's position only.
        assertEquals(n4.compareTo(n3), 1);
        assertEquals(n3.compareTo(n4), -1);
        assertEquals(n3.compareTo(n3), 0);
    }

    @Test
    public void shouldReturnNoteTypeConformingSpecifiedChar() {
        // Lookup is case-insensitive per these expectations.
        assertEquals(Note.getNoteTypeByChar('c'), Note.NoteType.COMMENTARY);
        assertEquals(Note.getNoteTypeByChar('U'), Note.NoteType.USER_NOTE);
    }

    @Test
    public void shouldReturnFirstCharOfSpecifiedNoteType() {
        assertEquals(Note.NoteType.COMMENTARY.getSpecifyingChar(), 'c');
        assertEquals(Note.NoteType.USER_NOTE.getSpecifyingChar(), 'u');
    }
}
| src/test/java/com/github/mnicky/bible4j/data/Note_Test.java | package com.github.mnicky.bible4j.data;
import static org.testng.Assert.assertEquals;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.github.mnicky.bible4j.data.BibleBook;
import com.github.mnicky.bible4j.data.Note.NoteType;
import com.github.mnicky.bible4j.data.Position;
/**
 * Unit tests for the Note class.
 */
public final class Note_Test {

    // Fixtures rebuilt before each test: n2 and n3 are equal to each other,
    // n1 differs from them by note type.
    private Note n1, n2, n3;

    @BeforeMethod
    public void beforeMethod() {
        n1 = new Note("some note text", new Position(BibleBook.ACTS, 2, 8), NoteType.COMMENTARY);
        n2 = new Note("some note text", new Position(BibleBook.ACTS, 2, 8), NoteType.USER_NOTE);
        n3 = new Note("some note text", new Position(BibleBook.ACTS, 2, 8), NoteType.USER_NOTE);
    }

    @Test
    public void testToString() {
        assertEquals(n1.toString(), "some note text - ACTS 2,8 (COMMENTARY)");
    }

    // TODO add more equals() tests

    @Test
    public void testEqualsForTrue() {
        assertEquals(n3.equals(n2), true);
    }

    @Test
    public void testEqualsForFalse() {
        assertEquals(n1.equals(n2), false);
    }

    @Test
    public void testHashCodeForConsistency() {
        // Equal notes must produce equal hash codes.
        assertEquals(n2.hashCode() == n3.hashCode(), true);
    }

    @Test
    public void testHashCodeForConsistencyWithEqualsForTrue() {
        assertEquals(n2.hashCode() == n3.hashCode(), n2.equals(n3));
    }

    @Test
    public void testHashCodeForConsistencyWithEqualsForFalse() {
        assertEquals(n1.hashCode() == n3.hashCode(), n1.equals(n3));
    }
}
| added unit tests for compareTo(), getNoteTypeByChar(), NoteType.getSpecifyingChar()
| src/test/java/com/github/mnicky/bible4j/data/Note_Test.java | added unit tests for compareTo(), getNoteTypeByChar(), NoteType.getSpecifyingChar() |
|
Java | mit | d72a334d6164477173e92367956484df66d43fb3 | 0 | jimkyndemeyer/js-graphql-intellij-plugin,jimkyndemeyer/js-graphql-intellij-plugin,jimkyndemeyer/js-graphql-intellij-plugin | /*
* Copyright (c) 2018-present, Jim Kynde Meyer
* All rights reserved.
* <p>
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.intellij.lang.jsgraphql.ide.validation;
import com.google.common.collect.Lists;
import com.intellij.codeInsight.daemon.impl.quickfix.RenameElementFix;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.Annotator;
import com.intellij.lang.jsgraphql.ide.project.GraphQLPsiSearchHelper;
import com.intellij.lang.jsgraphql.ide.validation.fixes.GraphQLMissingTypeFix;
import com.intellij.lang.jsgraphql.ide.validation.inspections.GraphQLUnresolvedReferenceInspection;
import com.intellij.lang.jsgraphql.psi.GraphQLArgument;
import com.intellij.lang.jsgraphql.psi.GraphQLDirective;
import com.intellij.lang.jsgraphql.psi.GraphQLFieldDefinition;
import com.intellij.lang.jsgraphql.psi.*;
import com.intellij.lang.jsgraphql.schema.GraphQLSchemaUtil;
import com.intellij.lang.jsgraphql.types.schema.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.text.EditDistance;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static com.intellij.lang.jsgraphql.ide.validation.inspections.GraphQLInspection.createAnnotation;
/**
 * Annotator that reports unresolved GraphQL references as editor annotations.
 * <p>
 * Covers: identifiers (fields, fragment spreads, arguments, directives, type names and
 * input object fields) whose PSI reference does not resolve, directive locations that do
 * not resolve, and enum values named after the reserved literals {@code true},
 * {@code false} and {@code null}. Where possible it suggests close matches
 * (edit distance &lt;= 2) and registers rename / create-type quick fixes.
 */
public class GraphQLValidationAnnotator implements Annotator {

    @Override
    public void annotate(@NotNull PsiElement psiElement, @NotNull AnnotationHolder annotationHolder) {
        // Whitespace and file-level elements never carry these annotations.
        if (psiElement instanceof PsiWhiteSpace || psiElement instanceof PsiFile) {
            return;
        }
        // identifiers - fields, fragment spreads, field arguments, directives, type names, input object fields
        if (psiElement instanceof GraphQLIdentifier) {
            checkIdentifierReferences(psiElement, annotationHolder);
        }
        // valid directive location names
        if (psiElement instanceof GraphQLDirectiveLocation) {
            checkDirectiveLocation(psiElement, annotationHolder);
        }
        // valid enum value names according to spec
        if (psiElement instanceof GraphQLEnumValue) {
            checkEnumValues((GraphQLEnumValue) psiElement, annotationHolder);
        }
    }

    /** Flags enum values that use the reserved literal names true/false/null. */
    private void checkEnumValues(GraphQLEnumValue psiElement, @NotNull AnnotationHolder annotationHolder) {
        final GraphQLIdentifier nameIdentifier = psiElement.getNameIdentifier();
        final String enumValueName = nameIdentifier.getText();
        if ("true".equals(enumValueName) || "false".equals(enumValueName) || "null".equals(enumValueName)) {
            createAnnotation(annotationHolder, nameIdentifier, "Enum values can not be named '" + enumValueName + "'");
        }
    }

    /**
     * Reports a directive location whose reference does not resolve, highlighted like an
     * unknown symbol and associated with the unresolved-reference inspection so its
     * severity can be configured.
     */
    private void checkDirectiveLocation(@NotNull PsiElement psiElement, @NotNull AnnotationHolder annotationHolder) {
        final PsiReference reference = psiElement.getReference();
        if (reference != null && reference.resolve() != null) {
            return; // resolves fine - nothing to report
        }
        createAnnotation(
            annotationHolder, psiElement, "Unknown directive location '" + psiElement.getText() + "'",
            GraphQLUnresolvedReferenceInspection.class, builder -> builder.highlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
        );
    }

    /**
     * Reports an identifier whose reference fails to resolve. Builds a context-aware
     * message from the parent PSI element (field, fragment spread, argument, directive,
     * input object field or type name) and the enclosing type scope, and registers
     * rename / create-type quick fixes where suggestions are available.
     */
    private void checkIdentifierReferences(@NotNull PsiElement element, @NotNull AnnotationHolder annotationHolder) {
        Project project = element.getProject();
        final PsiReference reference = element.getReference();
        if (reference != null && reference.resolve() != null) {
            return; // the identifier resolves - no error
        }
        final PsiElement parent = element.getParent();
        // Nearest ancestor that can supply a schema type scope for hints/suggestions.
        final GraphQLTypeScopeProvider typeScopeProvider = PsiTreeUtil.getParentOfType(parent, GraphQLTypeScopeProvider.class);
        com.intellij.lang.jsgraphql.types.schema.GraphQLType typeScope = null;
        if (typeScopeProvider != null) {
            typeScope = typeScopeProvider.getTypeScope();
            if (typeScope != null) {
                // unwrap non-nulls and lists for type and field hints
                typeScope = GraphQLSchemaUtil.getUnmodifiedType(typeScope);
            }
        }
        String message = null;
        // fixes to automatically rename misspelled identifiers
        final List<LocalQuickFix> fixes = Lists.newArrayList();
        Consumer<List<String>> createFixes = (List<String> suggestions) ->
            suggestions.forEach(suggestion -> fixes.add(new RenameElementFix((PsiNamedElement) element, suggestion)));
        if (parent instanceof GraphQLField) {
            message = "Unknown field \"" + element.getText() + "\"";
            if (typeScope != null) {
                String definitionType = "";
                if (typeScope instanceof GraphQLObjectType) {
                    definitionType = "object ";
                } else if (typeScope instanceof GraphQLInterfaceType) {
                    definitionType = "interface ";
                }
                message += " on " + definitionType + "type \"" + GraphQLSchemaUtil.getTypeName(typeScope) + "\"";
                final List<String> suggestions = getFieldNameSuggestions(element.getText(), typeScope);
                if (suggestions != null && !suggestions.isEmpty()) {
                    message += ". Did you mean " + formatSuggestions(suggestions) + "?";
                    createFixes.accept(suggestions);
                }
            } else {
                // no type info available from the parent
                message += ": The parent selection or operation does not resolve to a valid schema type";
            }
        } else if (parent instanceof GraphQLFragmentSpread) {
            message = "Unknown fragment spread \"" + element.getText() + "\"";
        } else if (parent instanceof GraphQLArgument) {
            message = "Unknown argument \"" + element.getText() + "\"";
            if (typeScope != null) {
                final List<String> suggestions = getArgumentNameSuggestions(element);
                if (!suggestions.isEmpty()) {
                    message += ". Did you mean " + formatSuggestions(suggestions) + "?";
                    createFixes.accept(suggestions);
                }
            }
        } else if (parent instanceof GraphQLDirective) {
            message = "Unknown directive \"" + element.getText() + "\"";
        } else if (parent instanceof GraphQLObjectField) {
            message = "Unknown field \"" + element.getText() + "\"";
            if (typeScope != null) {
                message += " on input type \"" + GraphQLSchemaUtil.getTypeName(typeScope) + "\"";
                final List<String> suggestions = getFieldNameSuggestions(element.getText(), typeScope);
                if (suggestions != null && !suggestions.isEmpty()) {
                    message += ". Did you mean " + formatSuggestions(suggestions) + "?";
                    createFixes.accept(suggestions);
                }
            }
        } else if (parent instanceof GraphQLTypeName) {
            message = "Unknown type \"" + element.getText() + "\"";
            fixes.addAll(GraphQLMissingTypeFix.getApplicableFixes((GraphQLIdentifier) element));
        }
        if (message == null) {
            return; // parent context is not one this annotator validates
        }
        String finalMessage = message;
        createAnnotation(annotationHolder, element, message, GraphQLUnresolvedReferenceInspection.class, builder -> {
            builder = builder.highlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL);
            if (!fixes.isEmpty()) {
                // A ProblemDescriptor is needed to attach the LocalQuickFixes to the annotation.
                final InspectionManager inspectionManager = InspectionManager.getInstance(project);
                final ProblemDescriptor problemDescriptor = inspectionManager.createProblemDescriptor(
                    element,
                    element,
                    finalMessage,
                    ProblemHighlightType.LIKE_UNKNOWN_SYMBOL,
                    true,
                    LocalQuickFix.EMPTY_ARRAY
                );
                for (LocalQuickFix fix : fixes) {
                    builder = builder.newLocalQuickFix(fix, problemDescriptor).registerFix();
                }
            }
            return builder;
        });
    }

    /**
     * Suggests argument names by resolving the enclosing field to its definition and
     * comparing the misspelled argument against the declared argument names.
     */
    private List<String> getArgumentNameSuggestions(PsiElement argument) {
        final GraphQLField field = PsiTreeUtil.getParentOfType(argument, GraphQLField.class);
        final GraphQLIdentifier fieldDefinitionIdentifier = GraphQLPsiSearchHelper.getResolvedReference(field);
        if (fieldDefinitionIdentifier != null) {
            GraphQLFieldDefinition fieldDefinition = PsiTreeUtil.getParentOfType(fieldDefinitionIdentifier, GraphQLFieldDefinition.class);
            if (fieldDefinition != null) {
                final GraphQLArgumentsDefinition argumentsDefinition = fieldDefinition.getArgumentsDefinition();
                if (argumentsDefinition != null) {
                    final List<String> argumentNames = Lists.newArrayList();
                    argumentsDefinition.getInputValueDefinitionList().forEach(arg -> {
                        if (arg.getName() != null) {
                            argumentNames.add(arg.getName());
                        }
                    });
                    return getSuggestions(argument.getText(), argumentNames);
                }
            }
        }
        return Collections.emptyList();
    }

    /**
     * Suggests field names from the given type scope: output fields for object/interface
     * containers, input fields for input object containers.
     */
    private List<String> getFieldNameSuggestions(String fieldName, com.intellij.lang.jsgraphql.types.schema.GraphQLType typeScope) {
        List<String> fieldNames = null;
        if (typeScope instanceof GraphQLFieldsContainer) {
            fieldNames = ((GraphQLFieldsContainer) typeScope).getFieldDefinitions().stream().map(com.intellij.lang.jsgraphql.types.schema.GraphQLFieldDefinition::getName).collect(Collectors.toList());
        } else if (typeScope instanceof GraphQLInputFieldsContainer) {
            fieldNames = ((GraphQLInputFieldsContainer) typeScope).getFieldDefinitions().stream().map(GraphQLInputObjectField::getName).collect(Collectors.toList());
        }
        if (fieldNames != null) {
            return getSuggestions(fieldName, fieldNames);
        }
        return Collections.emptyList();
    }

    /**
     * Returns the candidates within edit distance 2 of the given text,
     * closest matches first.
     */
    @NotNull
    private List<String> getSuggestions(String text, List<String> candidates) {
        return candidates.stream()
            .map(suggestion -> new Pair<>(suggestion, EditDistance.optimalAlignment(text, suggestion, false)))
            .filter(p -> p.second <= 2)
            .sorted(Comparator.comparingInt(p -> p.second))
            .map(p -> p.first).collect(Collectors.toList());
    }

    /** Formats suggestions as '"a", or "b"'; returns null when there are none. */
    private String formatSuggestions(List<String> suggestions) {
        if (suggestions != null && !suggestions.isEmpty()) {
            return "\"" + StringUtils.join(suggestions, "\", or \"") + "\"";
        }
        return null;
    }
}
| src/main/com/intellij/lang/jsgraphql/ide/validation/GraphQLValidationAnnotator.java | /*
* Copyright (c) 2018-present, Jim Kynde Meyer
* All rights reserved.
* <p>
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.intellij.lang.jsgraphql.ide.validation;
import com.google.common.collect.Lists;
import com.intellij.codeInsight.daemon.impl.quickfix.RenameElementFix;
import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.Annotator;
import com.intellij.lang.jsgraphql.ide.project.GraphQLPsiSearchHelper;
import com.intellij.lang.jsgraphql.ide.validation.fixes.GraphQLMissingTypeFix;
import com.intellij.lang.jsgraphql.ide.validation.inspections.GraphQLUnresolvedReferenceInspection;
import com.intellij.lang.jsgraphql.psi.GraphQLArgument;
import com.intellij.lang.jsgraphql.psi.GraphQLDirective;
import com.intellij.lang.jsgraphql.psi.GraphQLFieldDefinition;
import com.intellij.lang.jsgraphql.psi.*;
import com.intellij.lang.jsgraphql.schema.GraphQLSchemaUtil;
import com.intellij.lang.jsgraphql.types.schema.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.text.EditDistance;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static com.intellij.lang.jsgraphql.ide.validation.inspections.GraphQLInspection.createAnnotation;
/**
 * Annotator that reports unresolved GraphQL references as editor annotations.
 * <p>
 * Covers: identifiers (fields, fragment spreads, arguments, directives, type names and
 * input object fields) whose PSI reference does not resolve, directive locations that do
 * not resolve, and enum values named after the reserved literals {@code true},
 * {@code false} and {@code null}. Where possible it suggests close matches
 * (edit distance &lt;= 2) and registers rename / create-type quick fixes.
 */
public class GraphQLValidationAnnotator implements Annotator {

    @Override
    public void annotate(@NotNull PsiElement psiElement, @NotNull AnnotationHolder annotationHolder) {
        // Whitespace and file-level elements never carry these annotations.
        if (psiElement instanceof PsiWhiteSpace || psiElement instanceof PsiFile) {
            return;
        }
        // identifiers - fields, fragment spreads, field arguments, directives, type names, input object fields
        if (psiElement instanceof GraphQLIdentifier) {
            checkIdentifierReferences(psiElement, annotationHolder);
        }
        // valid directive location names
        if (psiElement instanceof GraphQLDirectiveLocation) {
            checkDirectiveLocation(psiElement, annotationHolder);
        }
        // valid enum value names according to spec
        if (psiElement instanceof GraphQLEnumValue) {
            checkEnumValues((GraphQLEnumValue) psiElement, annotationHolder);
        }
    }

    /** Flags enum values that use the reserved literal names true/false/null. */
    private void checkEnumValues(GraphQLEnumValue psiElement, @NotNull AnnotationHolder annotationHolder) {
        final GraphQLIdentifier nameIdentifier = psiElement.getNameIdentifier();
        final String enumValueName = nameIdentifier.getText();
        if ("true".equals(enumValueName) || "false".equals(enumValueName) || "null".equals(enumValueName)) {
            createAnnotation(annotationHolder, nameIdentifier, "Enum values can not be named '" + enumValueName + "'");
        }
    }

    /**
     * Reports a directive location whose reference does not resolve,
     * highlighted like an unknown symbol.
     */
    private void checkDirectiveLocation(@NotNull PsiElement psiElement, @NotNull AnnotationHolder annotationHolder) {
        final PsiReference reference = psiElement.getReference();
        if (reference != null && reference.resolve() != null) {
            return; // resolves fine - nothing to report
        }
        createAnnotation(
            annotationHolder, psiElement, "Unknown directive location '" + psiElement.getText() + "'",
            builder -> builder.highlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL)
        );
    }

    /**
     * Reports an identifier whose reference fails to resolve. Builds a context-aware
     * message from the parent PSI element (field, fragment spread, argument, directive,
     * input object field or type name) and the enclosing type scope, and registers
     * rename / create-type quick fixes where suggestions are available.
     */
    private void checkIdentifierReferences(@NotNull PsiElement element, @NotNull AnnotationHolder annotationHolder) {
        Project project = element.getProject();
        final PsiReference reference = element.getReference();
        if (reference != null && reference.resolve() != null) {
            return; // the identifier resolves - no error
        }
        final PsiElement parent = element.getParent();
        // Nearest ancestor that can supply a schema type scope for hints/suggestions.
        final GraphQLTypeScopeProvider typeScopeProvider = PsiTreeUtil.getParentOfType(parent, GraphQLTypeScopeProvider.class);
        com.intellij.lang.jsgraphql.types.schema.GraphQLType typeScope = null;
        if (typeScopeProvider != null) {
            typeScope = typeScopeProvider.getTypeScope();
            if (typeScope != null) {
                // unwrap non-nulls and lists for type and field hints
                typeScope = GraphQLSchemaUtil.getUnmodifiedType(typeScope);
            }
        }
        String message = null;
        // fixes to automatically rename misspelled identifiers
        final List<LocalQuickFix> fixes = Lists.newArrayList();
        Consumer<List<String>> createFixes = (List<String> suggestions) ->
            suggestions.forEach(suggestion -> fixes.add(new RenameElementFix((PsiNamedElement) element, suggestion)));
        if (parent instanceof GraphQLField) {
            message = "Unknown field \"" + element.getText() + "\"";
            if (typeScope != null) {
                String definitionType = "";
                if (typeScope instanceof GraphQLObjectType) {
                    definitionType = "object ";
                } else if (typeScope instanceof GraphQLInterfaceType) {
                    definitionType = "interface ";
                }
                message += " on " + definitionType + "type \"" + GraphQLSchemaUtil.getTypeName(typeScope) + "\"";
                final List<String> suggestions = getFieldNameSuggestions(element.getText(), typeScope);
                if (suggestions != null && !suggestions.isEmpty()) {
                    message += ". Did you mean " + formatSuggestions(suggestions) + "?";
                    createFixes.accept(suggestions);
                }
            } else {
                // no type info available from the parent
                message += ": The parent selection or operation does not resolve to a valid schema type";
            }
        } else if (parent instanceof GraphQLFragmentSpread) {
            message = "Unknown fragment spread \"" + element.getText() + "\"";
        } else if (parent instanceof GraphQLArgument) {
            message = "Unknown argument \"" + element.getText() + "\"";
            if (typeScope != null) {
                final List<String> suggestions = getArgumentNameSuggestions(element);
                if (!suggestions.isEmpty()) {
                    message += ". Did you mean " + formatSuggestions(suggestions) + "?";
                    createFixes.accept(suggestions);
                }
            }
        } else if (parent instanceof GraphQLDirective) {
            message = "Unknown directive \"" + element.getText() + "\"";
        } else if (parent instanceof GraphQLObjectField) {
            message = "Unknown field \"" + element.getText() + "\"";
            if (typeScope != null) {
                message += " on input type \"" + GraphQLSchemaUtil.getTypeName(typeScope) + "\"";
                final List<String> suggestions = getFieldNameSuggestions(element.getText(), typeScope);
                if (suggestions != null && !suggestions.isEmpty()) {
                    message += ". Did you mean " + formatSuggestions(suggestions) + "?";
                    createFixes.accept(suggestions);
                }
            }
        } else if (parent instanceof GraphQLTypeName) {
            message = "Unknown type \"" + element.getText() + "\"";
            fixes.addAll(GraphQLMissingTypeFix.getApplicableFixes((GraphQLIdentifier) element));
        }
        if (message == null) {
            return; // parent context is not one this annotator validates
        }
        String finalMessage = message;
        createAnnotation(annotationHolder, element, message, GraphQLUnresolvedReferenceInspection.class, builder -> {
            builder = builder.highlightType(ProblemHighlightType.LIKE_UNKNOWN_SYMBOL);
            if (!fixes.isEmpty()) {
                // A ProblemDescriptor is needed to attach the LocalQuickFixes to the annotation.
                final InspectionManager inspectionManager = InspectionManager.getInstance(project);
                final ProblemDescriptor problemDescriptor = inspectionManager.createProblemDescriptor(
                    element,
                    element,
                    finalMessage,
                    ProblemHighlightType.LIKE_UNKNOWN_SYMBOL,
                    true,
                    LocalQuickFix.EMPTY_ARRAY
                );
                for (LocalQuickFix fix : fixes) {
                    builder = builder.newLocalQuickFix(fix, problemDescriptor).registerFix();
                }
            }
            return builder;
        });
    }

    /**
     * Suggests argument names by resolving the enclosing field to its definition and
     * comparing the misspelled argument against the declared argument names.
     */
    private List<String> getArgumentNameSuggestions(PsiElement argument) {
        final GraphQLField field = PsiTreeUtil.getParentOfType(argument, GraphQLField.class);
        final GraphQLIdentifier fieldDefinitionIdentifier = GraphQLPsiSearchHelper.getResolvedReference(field);
        if (fieldDefinitionIdentifier != null) {
            GraphQLFieldDefinition fieldDefinition = PsiTreeUtil.getParentOfType(fieldDefinitionIdentifier, GraphQLFieldDefinition.class);
            if (fieldDefinition != null) {
                final GraphQLArgumentsDefinition argumentsDefinition = fieldDefinition.getArgumentsDefinition();
                if (argumentsDefinition != null) {
                    final List<String> argumentNames = Lists.newArrayList();
                    argumentsDefinition.getInputValueDefinitionList().forEach(arg -> {
                        if (arg.getName() != null) {
                            argumentNames.add(arg.getName());
                        }
                    });
                    return getSuggestions(argument.getText(), argumentNames);
                }
            }
        }
        return Collections.emptyList();
    }

    /**
     * Suggests field names from the given type scope: output fields for object/interface
     * containers, input fields for input object containers.
     */
    private List<String> getFieldNameSuggestions(String fieldName, com.intellij.lang.jsgraphql.types.schema.GraphQLType typeScope) {
        List<String> fieldNames = null;
        if (typeScope instanceof GraphQLFieldsContainer) {
            fieldNames = ((GraphQLFieldsContainer) typeScope).getFieldDefinitions().stream().map(com.intellij.lang.jsgraphql.types.schema.GraphQLFieldDefinition::getName).collect(Collectors.toList());
        } else if (typeScope instanceof GraphQLInputFieldsContainer) {
            fieldNames = ((GraphQLInputFieldsContainer) typeScope).getFieldDefinitions().stream().map(GraphQLInputObjectField::getName).collect(Collectors.toList());
        }
        if (fieldNames != null) {
            return getSuggestions(fieldName, fieldNames);
        }
        return Collections.emptyList();
    }

    /**
     * Returns the candidates within edit distance 2 of the given text,
     * closest matches first.
     */
    @NotNull
    private List<String> getSuggestions(String text, List<String> candidates) {
        return candidates.stream()
            .map(suggestion -> new Pair<>(suggestion, EditDistance.optimalAlignment(text, suggestion, false)))
            .filter(p -> p.second <= 2)
            .sorted(Comparator.comparingInt(p -> p.second))
            .map(p -> p.first).collect(Collectors.toList());
    }

    /** Formats suggestions as '"a", or "b"'; returns null when there are none. */
    private String formatSuggestions(List<String> suggestions) {
        if (suggestions != null && !suggestions.isEmpty()) {
            return "\"" + StringUtils.join(suggestions, "\", or \"") + "\"";
        }
        return null;
    }
}
| Unresolved reference for directive locations
| src/main/com/intellij/lang/jsgraphql/ide/validation/GraphQLValidationAnnotator.java | Unresolved reference for directive locations |
|
Java | mit | a7656954d2f519f110ccf821182d80ee63a79b47 | 0 | fredyw/leetcode,fredyw/leetcode,fredyw/leetcode,fredyw/leetcode | package leetcode;
/**
 * https://leetcode.com/problems/maximum-xor-for-each-query/
 *
 * Each query removes the last element. For the current prefix, the k < 2^maximumBit
 * that maximizes prefixXor ^ k is the bitwise complement of the prefix XOR within
 * the low maximumBit bits (flip every low bit).
 */
public class Problem1829 {
    /**
     * Returns the answers in query order (full array first, single element last).
     *
     * @param nums       sorted input array (per problem statement)
     * @param maximumBit answers must be strictly less than 2^maximumBit
     * @return one answer per query
     */
    public int[] getMaximumXor(int[] nums, int maximumBit) {
        int[] answer = new int[nums.length];
        // Loop-invariant mask of maximumBit ones; (1 << b) - 1 avoids the
        // floating-point round trip of Math.pow.
        final int mask = (1 << maximumBit) - 1;
        int prefixXor = 0;
        for (int i = 0; i < nums.length; i++) {
            prefixXor ^= nums[i];
            // a ^ b = c --> a ^ c = b: the i-th prefix answers query (n - 1 - i).
            answer[nums.length - 1 - i] = prefixXor ^ mask;
        }
        return answer;
    }
}
| src/main/java/leetcode/Problem1829.java | package leetcode;
import java.util.Arrays;
/**
 * https://leetcode.com/problems/maximum-xor-for-each-query/
 */
public class Problem1829 {
    /**
     * Each query removes the last element; for the current prefix the best
     * k < 2^maximumBit is the complement of the running XOR within the low
     * maximumBit bits. Answers are returned in query order (full array first).
     *
     * @param nums       input array
     * @param maximumBit answers must be strictly less than 2^maximumBit
     * @return one answer per query
     */
    public int[] getMaximumXor(int[] nums, int maximumBit) {
        int[] answer = new int[nums.length];
        final int mask = (1 << maximumBit) - 1; // maximumBit low bits set
        int prefixXor = 0;
        for (int i = 0; i < nums.length; i++) {
            prefixXor ^= nums[i];
            // The i-th prefix answers query (n - 1 - i), since queries shrink the array.
            answer[nums.length - 1 - i] = prefixXor ^ mask;
        }
        return answer;
    }

    public static void main(String[] args) {
        Problem1829 prob = new Problem1829();
        System.out.println(Arrays.toString(prob.getMaximumXor(new int[]{0,1,1,3}, 2))); // [0,3,2,3]
        System.out.println(Arrays.toString(prob.getMaximumXor(new int[]{2,3,4,7}, 3))); // [5,2,6,5]
        System.out.println(Arrays.toString(prob.getMaximumXor(new int[]{0,1,2,2,5,7}, 3))); // [4,3,6,4,6,7]
    }
}
| Solve problem 1829
| src/main/java/leetcode/Problem1829.java | Solve problem 1829 |
|
Java | mit | c9c974959e2d6ae96610d88e886a9cf9f2d18859 | 0 | 15-2505-001-7/eureka | package com.example.v001ff.footmark;
import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import io.realm.Realm;
/**
 * Posting screen: lets the user capture a photo with the camera, write a review,
 * and persist it as a new FootmarkDataTable row in Realm before returning to
 * MainActivity.
 */
public class AddSpotActivity extends AppCompatActivity {

    private Realm mRealm; // Realm instance used for DB updates
    EditText mAddPlaceName; // place-name input on the posting screen
    EditText mAddReview; // review input on the posting screen
    private EditText mDate; // date/time the review was posted
    String latitudeRef; // latitude reference to be read from the photo EXIF
    String latitude;
    String longitudeRef; // longitude reference to be read from the photo EXIF
    String longitude;
    // NOTE(review): this field is shadowed by a local in onActivityResult and is
    // never assigned here — TODO confirm whether it should hold the captured photo.
    Bitmap capturedImage;
    // private long AccountID — once the account feature is implemented, the posting
    // user's id should also be stored in the database.

    /** Request code used when launching the camera via startActivityForResult. */
    static final int REQUEST_CAPTURE_IMAGE = 100;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.fragment_add_spot);
        mRealm = Realm.getDefaultInstance(); // prepare Realm for use: obtain the default instance
        // NOTE(review): mAddPlaceName is never bound to a view in this version, so the
        // place-name input is currently unused by this activity.
        mAddReview = (EditText) findViewById(R.id.addReview);
        ImageView spot_photo = (ImageView) findViewById(R.id.spot_photo);
        spot_photo.setOnClickListener(new View.OnClickListener(){
            public void onClick(View view) { // prototype handler that launches the camera
                int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA);
                if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
                    // Runtime permissions (Android 6.0+): camera permission not granted yet.
                    final int REQUEST_CODE = 1;
                    ActivityCompat.requestPermissions(AddSpotActivity.this, new String[]{Manifest.permission.CAMERA}, REQUEST_CODE); // TODO: to be fixed
                    if (ActivityCompat.shouldShowRequestPermissionRationale(AddSpotActivity.this, Manifest.permission.CAMERA)) { // TODO: to be fixed
                        // Show an app-specific UI explaining why the permission is needed.
                        Snackbar.make(view, R.string.rationale, Snackbar.LENGTH_LONG).show();
                    }
                } else {
                    // Permission already granted, or pre-Android 6.0.
                    // The permission-protected work: launch the camera below.
                    Intent intent = new Intent();
                    intent.setAction(MediaStore.ACTION_IMAGE_CAPTURE);
                    startActivityForResult(intent, REQUEST_CAPTURE_IMAGE);
                }
            }
        });
    }

    /**
     * Receives the thumbnail returned by the camera app and displays it in the
     * spot_photo ImageView.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data){
        if(REQUEST_CAPTURE_IMAGE == requestCode && resultCode == Activity.RESULT_OK){
            //capturedImage = (Bitmap) data.getExtras().get("data");
            //((ImageView) findViewById(R.id.spot_photo)).setImageBitmap(capturedImage);
            // "data" extra holds the camera thumbnail. NOTE(review): this local shadows
            // the capturedImage field, so the field itself remains null.
            Bitmap capturedImage = (Bitmap) data.getExtras().get("data");
            ByteArrayOutputStream byteArrayStream = new ByteArrayOutputStream();
            // NOTE(review): the compressed bytes in byteArrayStream are not used
            // anywhere below — TODO confirm whether they should be persisted.
            capturedImage.compress(Bitmap.CompressFormat.PNG,0,byteArrayStream);
            ((ImageView) findViewById(R.id.spot_photo)).setImageBitmap(capturedImage);
        }
    }

    /**
     * Click handler for the posting button: stores the review text as a new
     * FootmarkDataTable row (auto-incremented PlaceId), shows a confirmation toast
     * and navigates back to MainActivity.
     */
    public void onPostingButtonTapped(View view) {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd"); // date format (still a String at this stage)
        Date dateParse = new Date();
        // NOTE(review): sdf and dateParse are unused while the date/EXIF block below
        // stays commented out.
        //byte[] bytes = MyUtils.getByteFromImage(capturedImage);
        /*
        try {
            dateParse = sdf.parse(mDate.getText().toString());
            ExifInterface exifInterface = new ExifInterface(capturedImage.toString()); // see p.283 for how Realm handles images; to be revised accordingly
            latitudeRef = exifInterface.getAttribute(ExifInterface.TAG_GPS_LATITUDE_REF); // read the latitude
            latitude = exifInterface.getAttribute(ExifInterface.TAG_GPS_LATITUDE);
            longitudeRef = exifInterface.getAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF); // read the longitude
            longitude = exifInterface.getAttribute(ExifInterface.TAG_GPS_LONGITUDE);
        }
        catch (Exception ex) {
            ex.printStackTrace();
        }
        */
        //final Date date = dateParse;
        mRealm.executeTransaction(new Realm.Transaction(){
            @Override
            public void execute(Realm realm){
                // Next primary key = current max PlaceId + 1 (0 when the table is empty).
                Number maxId = realm.where(FootmarkDataTable.class).max("PlaceId");
                long nextId = 0;
                if(maxId != null) nextId = maxId.longValue() + 1;
                //realm.beginTransaction();
                FootmarkDataTable footmarkDataTable = realm.createObject(FootmarkDataTable.class, new Long(nextId));
                footmarkDataTable.setReviewBody(mAddReview.getText().toString());
                //footmarkDataTable.setPlaceDate(date);
                //footmarkDataTable.setLatitude(latitude);
                //footmarkDataTable.setLongitude(longitude);
                //realm.commitTransaction();
            }
        });
        // (original note) write the statement that adds data to Realm here
        Toast.makeText(this, "投稿しました!", Toast.LENGTH_SHORT).show();
        startActivity(new Intent(AddSpotActivity.this, MainActivity.class));
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        mRealm.close(); // release DB resources when leaving the posting screen
    }
}
| Footmark/app/src/main/java/com/example/v001ff/footmark/AddSpotActivity.java | package com.example.v001ff.footmark;
import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.provider.MediaStore;
import android.support.design.widget.Snackbar;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import io.realm.Realm;
public class AddSpotActivity extends AppCompatActivity {
private Realm mRealm; //このオブジェクトはDB更新に使う
EditText mAddPlaceName; //投稿画面の場所の名前入力部分に対応
EditText mAddReview; //投稿画面のレビュー部分に対応
private EditText mDate; //投稿された日時
String latitudeRef; //画像から取得する緯度
String latitude;
String longitudeRef; //画像から取得する経度
String longitude;
Bitmap capturedImage;
//private long AccountID アカウント機能実装後に、投稿したユーザのIDもデータベースに保存する
static final int REQUEST_CAPTURE_IMAGE = 100;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.fragment_add_spot);
mRealm = Realm.getDefaultInstance(); //Realmを使用する準備。Realmクラスのインスタンスを取得している
mAddPlaceName = (EditText) findViewById(R.id.addPlaceName);
mAddReview = (EditText) findViewById(R.id.addReview);
ImageView spot_photo = (ImageView) findViewById(R.id.spot_photo);
spot_photo.setOnClickListener(new View.OnClickListener(){
public void onClick(View view) { //カメラ起動するための処理。試作。
int permissionCheck = ContextCompat.checkSelfPermission(getApplicationContext(), Manifest.permission.CAMERA);
if (permissionCheck != PackageManager.PERMISSION_GRANTED) {
// Android 6.0 のみ、カメラパーミッションが許可されていない場合
final int REQUEST_CODE = 1;
ActivityCompat.requestPermissions(AddSpotActivity.this, new String[]{Manifest.permission.CAMERA}, REQUEST_CODE); //修正予定ですごめんなさい
if (ActivityCompat.shouldShowRequestPermissionRationale(AddSpotActivity.this, Manifest.permission.CAMERA)) { //修正予定ですごめんなさい
// パーミッションが必要であることを明示するアプリケーション独自のUIを表示
Snackbar.make(view, R.string.rationale, Snackbar.LENGTH_LONG).show();
}
} else {
// 許可済みの場合、もしくはAndroid 6.0以前
// パーミッションが必要な処理。以下でカメラ起動。
Intent intent = new Intent();
intent.setAction(MediaStore.ACTION_IMAGE_CAPTURE);
startActivityForResult(intent, REQUEST_CAPTURE_IMAGE);
}
}
});
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data){
if(REQUEST_CAPTURE_IMAGE == requestCode && resultCode == Activity.RESULT_OK){
//capturedImage = (Bitmap) data.getExtras().get("data");
//((ImageView) findViewById(R.id.spot_photo)).setImageBitmap(capturedImage);
Bitmap capturedImage = (Bitmap) data.getExtras().get("data");
ByteArrayOutputStream byteArrayStream = new ByteArrayOutputStream();
capturedImage.compress(Bitmap.CompressFormat.PNG,0,byteArrayStream);
((ImageView) findViewById(R.id.spot_photo)).setImageBitmap(capturedImage);
}
}
public void onPostingButtonTapped(View view) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy/MM/dd"); //日付の取得(この段階ではString型)
Date dateParse = new Date();
//byte[] bytes = MyUtils.getByteFromImage(capturedImage);
/*
try {
dateParse = sdf.parse(mDate.getText().toString());
ExifInterface exifInterface = new ExifInterface(capturedImage.toString()); //p283にRealmでの画像の扱い方書いてるので参照して修正予定
latitudeRef = exifInterface.getAttribute(ExifInterface.TAG_GPS_LATITUDE_REF); //緯度の取得
latitude = exifInterface.getAttribute(ExifInterface.TAG_GPS_LATITUDE);
longitudeRef = exifInterface.getAttribute(ExifInterface.TAG_GPS_LONGITUDE_REF); //経度の取得
longitude = exifInterface.getAttribute(ExifInterface.TAG_GPS_LONGITUDE);
}
catch (Exception ex) {
ex.printStackTrace();
}
*/
//final Date date = dateParse;
mRealm.executeTransaction(new Realm.Transaction(){
@Override
public void execute(Realm realm){
Number maxId = realm.where(FootmarkDataTable.class).max("PlaceId");
long nextId = 0;
if(maxId != null) nextId = maxId.longValue() + 1;
//realm.beginTransaction();
FootmarkDataTable footmarkDataTable = realm.createObject(FootmarkDataTable.class, new Long(nextId));
footmarkDataTable.setPlaceName(mAddPlaceName.getText().toString());
footmarkDataTable.setReviewBody(mAddReview.getText().toString());
//footmarkDataTable.setPlaceDate(date);
//footmarkDataTable.setLatitude(latitude);
//footmarkDataTable.setLongitude(longitude);
//realm.commitTransaction();
}
});
//ここにRealmにデータ追加する文を書く
Toast.makeText(this, "投稿しました!", Toast.LENGTH_SHORT).show();
startActivity(new Intent(AddSpotActivity.this, MainActivity.class));
}
@Override
public void onDestroy() {
super.onDestroy();
mRealm.close(); //投稿画面から離れるときにDBのリソース開放
}
}
| 追加投稿できるようになりました
| Footmark/app/src/main/java/com/example/v001ff/footmark/AddSpotActivity.java | 追加投稿できるようになりました |
|
Java | mit | 8fb5f9c9cc391893a586d6ed73e47ab9bea0b766 | 0 | EulerianTechnologies/eanalytics-android,EulerianTechnologies/eanalytics-android | package com.eulerian.android.sdk;
import org.json.JSONObject;
/**
* Created by Francois Rouault on 07/03/2015.
*/
public class Product {
private static final String KEY_REF = "ref";
private static final String KEY_NAME = "name";
private static final String KEY_PARAMS = "params";
private static final String KEY_GROUP = "group";
private JSONObject mJson;
protected Product(Builder builder) {
mJson = builder.mainJson;
}
public JSONObject getJson() {
return mJson;
}
//-----------
//- BUILDER
//-----------
public static class Builder {
private final JSONObject mainJson = new JSONObject();
public Builder(String reference) {
JSONUtils.put(mainJson, KEY_REF, reference);
}
public Builder setName(String name) {
JSONUtils.put(mainJson, KEY_NAME, name);
return this;
}
public Builder setGroup(String group) {
JSONUtils.put(mainJson, KEY_GROUP, group);
return this;
}
public Builder setParams(Params params) {
JSONUtils.put(mainJson, KEY_PARAMS, params.getJson());
return this;
}
public Product build() {
return new Product(this);
}
}
}
| eanalytics/src/main/java/com/eulerian/android/sdk/Product.java | package com.eulerian.android.sdk;
import org.json.JSONObject;
/**
* Created by Francois Rouault on 07/03/2015.
*/
public class Product {
private static final String KEY_REF = "ref";
private static final String KEY_NAME = "name";
private static final String KEY_PARAMS = "params";
private JSONObject mJson;
protected Product(Builder builder) {
mJson = builder.mainJson;
}
public JSONObject getJson() {
return mJson;
}
//-----------
//- BUILDER
//-----------
public static class Builder {
private final JSONObject mainJson = new JSONObject();
public Builder(String reference) {
JSONUtils.put(mainJson, KEY_REF, reference);
}
public Builder setName(String name) {
JSONUtils.put(mainJson, KEY_NAME, name);
return this;
}
public Builder setParams(Params params) {
JSONUtils.put(mainJson, KEY_PARAMS, params.getJson());
return this;
}
public Product build() {
return new Product(this);
}
}
}
| product : key "group" added
| eanalytics/src/main/java/com/eulerian/android/sdk/Product.java | product : key "group" added |
|
Java | lgpl-2.1 | 38beb79395e1affb378e7a0d999b16aa142e4d4a | 0 | tgvaughan/beast2,CompEvol/beast2,CompEvol/beast2,Anaphory/beast2,tgvaughan/beast2,Anaphory/beast2,Anaphory/beast2,CompEvol/beast2,tgvaughan/beast2,Anaphory/beast2,tgvaughan/beast2,CompEvol/beast2 | package beast.app.tools;
import java.awt.BorderLayout;
import java.awt.Font;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.border.EmptyBorder;
import beast.app.BEASTVersion;
import beast.app.util.Utils;
import beast.app.util.WholeNumberField;
import beast.core.util.Log;
import beast.evolution.tree.coalescent.CompoundPopulationFunction;
import beast.evolution.tree.coalescent.CompoundPopulationFunction.Type;
import beast.math.statistic.DiscreteStatistics;
import beast.util.HeapSort;
import jam.console.ConsoleApplication;
import jam.panels.OptionsPanel;
public class EBSPAnalyser {
String m_sFileOut;
PrintStream m_out = System.out;
CompoundPopulationFunction.Type m_type = Type.LINEAR;
String m_sInputFile;
int m_nBurninPercentage = 10;
private void run() throws Exception {
parse(m_sInputFile, m_nBurninPercentage, m_type, m_out);
}
void parse(String fileName, int nBurnInPercentage, CompoundPopulationFunction.Type type, PrintStream out) throws Exception {
logln("Processing " + fileName);
BufferedReader fin = new BufferedReader(new FileReader(fileName));
String str;
int nData = 0;
// first, sweep through the log file to determine size of the log
while (fin.ready()) {
str = fin.readLine();
// terrible hackish code, must improve later
if( str.charAt(0) == '#' ) {
int i = str.indexOf("spec=");
if( i > 0 ) {
if( str.indexOf("type=\"stepwise\"") > 0 ) {
m_type = Type.STEPWISE;
} else if( str.indexOf("type=\"linear\"") > 0 ) {
m_type = Type.LINEAR;
}
}
}
if (str.indexOf('#') < 0 && str.matches(".*[0-9a-zA-Z].*")) {
nData++;
}
}
final int nBurnIn = nData * nBurnInPercentage / 100;
logln(" skipping " + nBurnIn + " line\n\n");
nData = -nBurnIn - 1;
fin.close();
fin = new BufferedReader(new FileReader(fileName));
// process log
final List<List<Double>> times = new ArrayList<>();
final List<List<Double>> popSizes = new ArrayList<>();
double[] alltimes = null;
while (fin.ready()) {
str = fin.readLine();
if (str.indexOf('#') < 0 && str.matches(".*[0-9a-zA-Z].*")) {
if (++nData > 0) {
final String[] strs = str.split("\t");
final List<Double> times2 = new ArrayList<>();
final List<Double> popSizes2 = new ArrayList<>();
if (alltimes == null) {
alltimes = new double[strs.length - 1];
}
for (int i = 1; i < strs.length; i++) {
final String[] strs2 = strs[i].split(":");
final Double time = Double.parseDouble(strs2[0]);
alltimes[i - 1] += time;
if (strs2.length > 1) {
times2.add(time);
popSizes2.add(Double.parseDouble(strs2[1]));
}
}
times.add(times2);
popSizes.add(popSizes2);
}
}
}
if (alltimes == null) {
//burn-in too large?
return;
}
// take average of coalescent times
for (int i = 0; i < alltimes.length; i++) {
alltimes[i] /= times.size();
}
// generate output
out.println("time\tmean\tmedian\t95HPD lower\t95HPD upper");
final double[] popSizeAtTimeT = new double[times.size()];
int[] indices = new int[times.size()];
for (final double time : alltimes) {
for (int j = 0; j < popSizeAtTimeT.length; j++) {
popSizeAtTimeT[j] = calcPopSize(type, times.get(j), popSizes.get(j), time);
}
HeapSort.sort(popSizeAtTimeT, indices);
out.print(time + "\t");
out.print(DiscreteStatistics.mean(popSizeAtTimeT) + "\t");
out.print(DiscreteStatistics.median(popSizeAtTimeT) + "\t");
double[] hpdInterval = DiscreteStatistics.HPDInterval(0.95, popSizeAtTimeT, indices);
out.println(hpdInterval[0] + "\t" + hpdInterval[1]);
}
}
private double calcPopSize(CompoundPopulationFunction.Type type, List<Double> xs, List<Double> ys, double d) {
// TODO completely untested
// assume linear
//assert typeName.equals("Linear");
final int n = xs.size();
final double xn = xs.get(n - 1);
if (d >= xn) {
return ys.get(n - 1);
}
assert d >= xs.get(0);
int i = 1;
while (d >= xs.get(i)) {
++i;
}
// d < xs.get(i)
double x0 = xs.get(i-1);
double x1 = xs.get(i);
double y0 = ys.get(i-1);
double y1 = ys.get(i);
assert x0 <= d && d <= x1 : "" + x0 + "," + x1 + "," + d;
switch (type) {
case LINEAR:
final double p = (d * (y1 - y0) + (y0 * x1 - y1 * x0)) / (x1 - x0);
assert p > 0;
return p;
case STEPWISE:
assert y1 > 0;
return y1;
}
return 0;
}
private void parseArgs(String[] args) {
int i = 0;
try {
while (i < args.length) {
int iOld = i;
if (i < args.length) {
if (args[i].equals("")) {
i += 1;
} else if (args[i].equals("-help") || args[i].equals("-h") || args[i].equals("--help")) {
System.out.println(getUsage());
System.exit(0);
} else if (args[i].equals("-i")) {
m_sInputFile = args[i + 1];
i += 2;
} else if (args[i].equals("-o")) {
m_sFileOut = args[i + 1];
m_out = new PrintStream(m_sFileOut);
i += 2;
} else if (args[i].equals("-type")) {
if (args[i + 1].equals("linear")) {
m_type = Type.LINEAR;
} else if (args[i + 1].equals("stepwise")) {
m_type = Type.STEPWISE;
} else {
throw new IllegalArgumentException("Expected linear or stepwise, not " + args[i + 1]);
}
i += 2;
} else if (args[i].equals("-burnin")) {
m_nBurninPercentage = Integer.parseInt(args[i + 1]);
i += 2;
}
if (i == iOld) {
throw new IllegalArgumentException("Unrecognised argument (argument " + i + ": " + args[i] + ")");
}
}
}
} catch (IllegalArgumentException e) {
throw e;
} catch (Exception e) {
e.printStackTrace();
throw new IllegalArgumentException("Error parsing command line arguments: " + Arrays.toString(args) + "\nArguments ignored\n\n" + getUsage());
}
if (m_sFileOut == null) {
Log.warning.println("No output file specified");
}
}
static String getUsage() {
return "EBSPAnalyse -i <inputfile> [options]\n" +
"analyses trace file generated by EBSP analysis\n" +
"Options are:\n" +
"-i <inputfile> name of input file (required)\n" +
"-burnin <percentage> percent of log to consider burn in, default 10\n" +
"-type [linear|step] type of population function\n" +
"-o <outputfile> name of output file, default to output on stdout\n" +
"";
}
protected void log(String s) {
Log.warning.print(s);
}
protected void logln(String s) {
Log.warning.println(s);
}
private void printTitle(String aboutString) {
aboutString = "LogCombiner" + aboutString.replaceAll("</p>", "\n\n");
aboutString = aboutString.replaceAll("<br>", "\n");
aboutString = aboutString.replaceAll("<[^>]*>", " ");
String[] strs = aboutString.split("\n");
for (String str : strs) {
int n = 80 - str.length();
int n1 = n / 2;
for (int i = 0; i < n1; i++) {
log(" ");
}
logln(str);
}
}
public class EBSPAnalyserDialog {
private final JFrame frame;
private final OptionsPanel optionPanel;
private final JTextField inputFileNameText = new JTextField("not selected", 16);
private final JComboBox<String> typeCombo = new JComboBox<>(new String[]{"linear", "stepwise"});
final WholeNumberField burninText = new WholeNumberField(0, Long.MAX_VALUE);
private final JTextField outputFileNameText = new JTextField("not selected", 16);
private File outputFile = null;
private File inputFile = null;
public EBSPAnalyserDialog(final JFrame frame, String titleString, Icon icon) {
this.frame = frame;
optionPanel = new OptionsPanel(12, 12);
final JLabel titleText = new JLabel(titleString);
titleText.setIcon(icon);
optionPanel.addSpanningComponent(titleText);
titleText.setFont(new Font("sans-serif", 0, 12));
JPanel panel = new JPanel(new BorderLayout());
panel.setOpaque(false);
JButton button = new JButton("Choose Input File...");
button.addActionListener(ae -> {
File file = Utils.getLoadFile("Select input file...");
if (file == null) {
// the dialog was cancelled...
return;
}
inputFile = file;
inputFileNameText.setText(inputFile.getName());
});
inputFileNameText.setEditable(false);
JButton button2 = new JButton("Choose Output File...");
button2.addActionListener(ae -> {
File file = Utils.getSaveFile("Select output file...");
if (file == null) {
// the dialog was cancelled...
return;
}
outputFile = file;
outputFileNameText.setText(outputFile.getName());
});
outputFileNameText.setEditable(false);
JPanel panel1 = new JPanel(new BorderLayout(0, 0));
panel1.add(inputFileNameText, BorderLayout.CENTER);
panel1.add(button, BorderLayout.EAST);
optionPanel.addComponentWithLabel("Input File: ", panel1);
optionPanel.addComponentWithLabel("File type: ", typeCombo);
burninText.setColumns(12);
burninText.setValue(10);
optionPanel.addComponentWithLabel("Burn in percentage: ", burninText);
optionPanel.addSpanningComponent(panel);
JPanel panel3 = new JPanel(new BorderLayout(0, 0));
panel3.add(outputFileNameText, BorderLayout.CENTER);
panel3.add(button2, BorderLayout.EAST);
optionPanel.addComponentWithLabel("Output File: ", panel3);
}
public boolean showDialog(String title) {
JOptionPane optionPane = new JOptionPane(optionPanel,
JOptionPane.PLAIN_MESSAGE,
JOptionPane.OK_CANCEL_OPTION,
null,
new String[]{"Run", "Quit"},
null);
optionPane.setBorder(new EmptyBorder(12, 12, 12, 12));
final JDialog dialog = optionPane.createDialog(frame, title);
//dialog.setResizable(true);
dialog.pack();
dialog.setVisible(true);
return optionPane.getValue().equals("Run");
}
public String getOutputFileName() {
if (outputFile == null) return null;
return outputFile.getPath();
}
public String[] getArgs() {
java.util.List<String> args = new ArrayList<>();
if (inputFile != null) {
args.add("-i");
args.add(inputFile.getPath());
}
args.add("-burnin");
args.add(burninText.getText());
args.add("-type");
args.add(typeCombo.getSelectedItem().toString());
if (outputFile != null) {
args.add("-o");
args.add(outputFile.getPath());
}
return args.toArray(new String[0]);
}
}
/**
* @param args
*/
public static void main(String[] args) {
BEASTVersion version = new BEASTVersion();
final String versionString = version.getVersionString();
String nameString = "EBSP Analyser " + versionString;
String aboutString = "<html><center><p>" + versionString + ", " + version.getDateString() + "</p>" +
"<p>by<br>" +
"<p>Joseph Heled and Remco Bouckaert</p>" +
"<p>Department of Computer Science, University of Auckland<br>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<p>Part of the BEAST 2 package:<br>" +
"<a href=\"http://beast2.cs.auckland.ac.nz/\">http://beast2.cs.auckland.ac.nz/</a></p>" +
"</center></html>";
try {
EBSPAnalyser analyser = new EBSPAnalyser();
if (args.length == 0) {
System.setProperty("com.apple.macos.useScreenMenuBar", "true");
System.setProperty("apple.laf.useScreenMenuBar", "true");
System.setProperty("apple.awt.showGrowBox", "true");
// TODO: set up ICON
java.net.URL url = EBSPAnalyser.class.getResource("images/EBSPAnalyser.png");
javax.swing.Icon icon = null;
if (url != null) {
icon = new javax.swing.ImageIcon(url);
}
//ConsoleApplication consoleApp =
new ConsoleApplication(nameString, aboutString, icon, true);
analyser.printTitle(aboutString);
String titleString = "<html><center><p>EBSPAnalyser<br>" +
"Version " + version.getVersionString() + ", " + version.getDateString() + "</p></center></html>";
EBSPAnalyserDialog dialog = analyser.new EBSPAnalyserDialog(new JFrame(), titleString, icon);
if (!dialog.showDialog(nameString)) {
return;
}
String[] args2 = dialog.getArgs();
try {
analyser.parseArgs(args2);
analyser.run();
} catch (Exception ex) {
Log.err.println("Exception: " + ex.getMessage());
ex.printStackTrace();
}
System.out.println("Finished - Quit program to exit.");
while (true) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} else {
analyser.printTitle(aboutString);
analyser.parseArgs(args);
analyser.run();
}
} catch (Exception e) {
System.out.println(getUsage());
e.printStackTrace();
}
}
}
| src/beast/app/tools/EBSPAnalyser.java | package beast.app.tools;
import java.awt.BorderLayout;
import java.awt.Font;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.border.EmptyBorder;
import beast.app.BEASTVersion;
import beast.app.util.Utils;
import beast.app.util.WholeNumberField;
import beast.core.util.Log;
import beast.evolution.tree.coalescent.CompoundPopulationFunction;
import beast.evolution.tree.coalescent.CompoundPopulationFunction.Type;
import beast.math.statistic.DiscreteStatistics;
import jam.console.ConsoleApplication;
import jam.panels.OptionsPanel;
public class EBSPAnalyser {
String m_sFileOut;
PrintStream m_out = System.out;
CompoundPopulationFunction.Type m_type = Type.LINEAR;
String m_sInputFile;
int m_nBurninPercentage = 10;
private void run() throws Exception {
parse(m_sInputFile, m_nBurninPercentage, m_type, m_out);
}
void parse(String fileName, int nBurnInPercentage, CompoundPopulationFunction.Type type, PrintStream out) throws Exception {
logln("Processing " + fileName);
BufferedReader fin = new BufferedReader(new FileReader(fileName));
String str;
int nData = 0;
// first, sweep through the log file to determine size of the log
while (fin.ready()) {
str = fin.readLine();
// terrible hackish code, must improve later
if( str.charAt(0) == '#' ) {
int i = str.indexOf("spec=");
if( i > 0 ) {
if( str.indexOf("type=\"stepwise\"") > 0 ) {
m_type = Type.STEPWISE;
} else if( str.indexOf("type=\"linear\"") > 0 ) {
m_type = Type.LINEAR;
}
}
}
if (str.indexOf('#') < 0 && str.matches(".*[0-9a-zA-Z].*")) {
nData++;
}
}
final int nBurnIn = nData * nBurnInPercentage / 100;
logln(" skipping " + nBurnIn + " line\n\n");
nData = -nBurnIn - 1;
fin.close();
fin = new BufferedReader(new FileReader(fileName));
// process log
final List<List<Double>> times = new ArrayList<>();
final List<List<Double>> popSizes = new ArrayList<>();
double[] alltimes = null;
while (fin.ready()) {
str = fin.readLine();
if (str.indexOf('#') < 0 && str.matches(".*[0-9a-zA-Z].*")) {
if (++nData > 0) {
final String[] strs = str.split("\t");
final List<Double> times2 = new ArrayList<>();
final List<Double> popSizes2 = new ArrayList<>();
if (alltimes == null) {
alltimes = new double[strs.length - 1];
}
for (int i = 1; i < strs.length; i++) {
final String[] strs2 = strs[i].split(":");
final Double time = Double.parseDouble(strs2[0]);
alltimes[i - 1] += time;
if (strs2.length > 1) {
times2.add(time);
popSizes2.add(Double.parseDouble(strs2[1]));
}
}
times.add(times2);
popSizes.add(popSizes2);
}
}
}
if (alltimes == null) {
//burn-in too large?
return;
}
// take average of coalescent times
for (int i = 0; i < alltimes.length; i++) {
alltimes[i] /= times.size();
}
// generate output
out.println("time\tmean\tmedian\t95HPD lower\t95HPD upper");
final double[] popSizeAtTimeT = new double[times.size()];
for (final double time : alltimes) {
for (int j = 0; j < popSizeAtTimeT.length; j++) {
popSizeAtTimeT[j] = calcPopSize(type, times.get(j), popSizes.get(j), time);
}
Arrays.sort(popSizeAtTimeT);
out.print(time + "\t");
out.print(DiscreteStatistics.mean(popSizeAtTimeT) + "\t");
out.print(DiscreteStatistics.median(popSizeAtTimeT) + "\t");
out.print(DiscreteStatistics.quantile(0.025, popSizeAtTimeT) + "\t");
out.println(DiscreteStatistics.quantile(0.975, popSizeAtTimeT));
}
}
private double calcPopSize(CompoundPopulationFunction.Type type, List<Double> xs, List<Double> ys, double d) {
// TODO completely untested
// assume linear
//assert typeName.equals("Linear");
final int n = xs.size();
final double xn = xs.get(n - 1);
if (d >= xn) {
return ys.get(n - 1);
}
assert d >= xs.get(0);
int i = 1;
while (d >= xs.get(i)) {
++i;
}
// d < xs.get(i)
double x0 = xs.get(i-1);
double x1 = xs.get(i);
double y0 = ys.get(i-1);
double y1 = ys.get(i);
assert x0 <= d && d <= x1 : "" + x0 + "," + x1 + "," + d;
switch (type) {
case LINEAR:
final double p = (d * (y1 - y0) + (y0 * x1 - y1 * x0)) / (x1 - x0);
assert p > 0;
return p;
case STEPWISE:
assert y1 > 0;
return y1;
}
return 0;
}
private void parseArgs(String[] args) {
int i = 0;
try {
while (i < args.length) {
int iOld = i;
if (i < args.length) {
if (args[i].equals("")) {
i += 1;
} else if (args[i].equals("-help") || args[i].equals("-h") || args[i].equals("--help")) {
System.out.println(getUsage());
System.exit(0);
} else if (args[i].equals("-i")) {
m_sInputFile = args[i + 1];
i += 2;
} else if (args[i].equals("-o")) {
m_sFileOut = args[i + 1];
m_out = new PrintStream(m_sFileOut);
i += 2;
} else if (args[i].equals("-type")) {
if (args[i + 1].equals("linear")) {
m_type = Type.LINEAR;
} else if (args[i + 1].equals("stepwise")) {
m_type = Type.STEPWISE;
} else {
throw new IllegalArgumentException("Expected linear or stepwise, not " + args[i + 1]);
}
i += 2;
} else if (args[i].equals("-burnin")) {
m_nBurninPercentage = Integer.parseInt(args[i + 1]);
i += 2;
}
if (i == iOld) {
throw new IllegalArgumentException("Unrecognised argument (argument " + i + ": " + args[i] + ")");
}
}
}
} catch (IllegalArgumentException e) {
throw e;
} catch (Exception e) {
e.printStackTrace();
throw new IllegalArgumentException("Error parsing command line arguments: " + Arrays.toString(args) + "\nArguments ignored\n\n" + getUsage());
}
if (m_sFileOut == null) {
Log.warning.println("No output file specified");
}
}
static String getUsage() {
return "EBSPAnalyse -i <inputfile> [options]\n" +
"analyses trace file generated by EBSP analysis\n" +
"Options are:\n" +
"-i <inputfile> name of input file (required)\n" +
"-burnin <percentage> percent of log to consider burn in, default 10\n" +
"-type [linear|step] type of population function\n" +
"-o <outputfile> name of output file, default to output on stdout\n" +
"";
}
protected void log(String s) {
Log.warning.print(s);
}
protected void logln(String s) {
Log.warning.println(s);
}
private void printTitle(String aboutString) {
aboutString = "LogCombiner" + aboutString.replaceAll("</p>", "\n\n");
aboutString = aboutString.replaceAll("<br>", "\n");
aboutString = aboutString.replaceAll("<[^>]*>", " ");
String[] strs = aboutString.split("\n");
for (String str : strs) {
int n = 80 - str.length();
int n1 = n / 2;
for (int i = 0; i < n1; i++) {
log(" ");
}
logln(str);
}
}
public class EBSPAnalyserDialog {
private final JFrame frame;
private final OptionsPanel optionPanel;
private final JTextField inputFileNameText = new JTextField("not selected", 16);
private final JComboBox<String> typeCombo = new JComboBox<>(new String[]{"linear", "stepwise"});
final WholeNumberField burninText = new WholeNumberField(0, Long.MAX_VALUE);
private final JTextField outputFileNameText = new JTextField("not selected", 16);
private File outputFile = null;
private File inputFile = null;
public EBSPAnalyserDialog(final JFrame frame, String titleString, Icon icon) {
this.frame = frame;
optionPanel = new OptionsPanel(12, 12);
final JLabel titleText = new JLabel(titleString);
titleText.setIcon(icon);
optionPanel.addSpanningComponent(titleText);
titleText.setFont(new Font("sans-serif", 0, 12));
JPanel panel = new JPanel(new BorderLayout());
panel.setOpaque(false);
JButton button = new JButton("Choose Input File...");
button.addActionListener(ae -> {
File file = Utils.getLoadFile("Select input file...");
if (file == null) {
// the dialog was cancelled...
return;
}
inputFile = file;
inputFileNameText.setText(inputFile.getName());
});
inputFileNameText.setEditable(false);
JButton button2 = new JButton("Choose Output File...");
button2.addActionListener(ae -> {
File file = Utils.getSaveFile("Select output file...");
if (file == null) {
// the dialog was cancelled...
return;
}
outputFile = file;
outputFileNameText.setText(outputFile.getName());
});
outputFileNameText.setEditable(false);
JPanel panel1 = new JPanel(new BorderLayout(0, 0));
panel1.add(inputFileNameText, BorderLayout.CENTER);
panel1.add(button, BorderLayout.EAST);
optionPanel.addComponentWithLabel("Input File: ", panel1);
optionPanel.addComponentWithLabel("File type: ", typeCombo);
burninText.setColumns(12);
burninText.setValue(10);
optionPanel.addComponentWithLabel("Burn in percentage: ", burninText);
optionPanel.addSpanningComponent(panel);
JPanel panel3 = new JPanel(new BorderLayout(0, 0));
panel3.add(outputFileNameText, BorderLayout.CENTER);
panel3.add(button2, BorderLayout.EAST);
optionPanel.addComponentWithLabel("Output File: ", panel3);
}
public boolean showDialog(String title) {
JOptionPane optionPane = new JOptionPane(optionPanel,
JOptionPane.PLAIN_MESSAGE,
JOptionPane.OK_CANCEL_OPTION,
null,
new String[]{"Run", "Quit"},
null);
optionPane.setBorder(new EmptyBorder(12, 12, 12, 12));
final JDialog dialog = optionPane.createDialog(frame, title);
//dialog.setResizable(true);
dialog.pack();
dialog.setVisible(true);
return optionPane.getValue().equals("Run");
}
public String getOutputFileName() {
if (outputFile == null) return null;
return outputFile.getPath();
}
public String[] getArgs() {
java.util.List<String> args = new ArrayList<>();
if (inputFile != null) {
args.add("-i");
args.add(inputFile.getPath());
}
args.add("-burnin");
args.add(burninText.getText());
args.add("-type");
args.add(typeCombo.getSelectedItem().toString());
if (outputFile != null) {
args.add("-o");
args.add(outputFile.getPath());
}
return args.toArray(new String[0]);
}
}
/**
* @param args
*/
public static void main(String[] args) {
BEASTVersion version = new BEASTVersion();
final String versionString = version.getVersionString();
String nameString = "EBSP Analyser " + versionString;
String aboutString = "<html><center><p>" + versionString + ", " + version.getDateString() + "</p>" +
"<p>by<br>" +
"<p>Joseph Heled and Remco Bouckaert</p>" +
"<p>Department of Computer Science, University of Auckland<br>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<p>Part of the BEAST 2 package:<br>" +
"<a href=\"http://beast2.cs.auckland.ac.nz/\">http://beast2.cs.auckland.ac.nz/</a></p>" +
"</center></html>";
try {
EBSPAnalyser analyser = new EBSPAnalyser();
if (args.length == 0) {
System.setProperty("com.apple.macos.useScreenMenuBar", "true");
System.setProperty("apple.laf.useScreenMenuBar", "true");
System.setProperty("apple.awt.showGrowBox", "true");
// TODO: set up ICON
java.net.URL url = EBSPAnalyser.class.getResource("images/EBSPAnalyser.png");
javax.swing.Icon icon = null;
if (url != null) {
icon = new javax.swing.ImageIcon(url);
}
//ConsoleApplication consoleApp =
new ConsoleApplication(nameString, aboutString, icon, true);
analyser.printTitle(aboutString);
String titleString = "<html><center><p>EBSPAnalyser<br>" +
"Version " + version.getVersionString() + ", " + version.getDateString() + "</p></center></html>";
EBSPAnalyserDialog dialog = analyser.new EBSPAnalyserDialog(new JFrame(), titleString, icon);
if (!dialog.showDialog(nameString)) {
return;
}
String[] args2 = dialog.getArgs();
try {
analyser.parseArgs(args2);
analyser.run();
} catch (Exception ex) {
Log.err.println("Exception: " + ex.getMessage());
ex.printStackTrace();
}
System.out.println("Finished - Quit program to exit.");
while (true) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} else {
analyser.printTitle(aboutString);
analyser.parseArgs(args);
analyser.run();
}
} catch (Exception e) {
System.out.println(getUsage());
e.printStackTrace();
}
}
}
| EBSPAnlyser now computes 95% HPDs as advertized. Closes #481.
| src/beast/app/tools/EBSPAnalyser.java | EBSPAnlyser now computes 95% HPDs as advertized. Closes #481. |
|
Java | apache-2.0 | 278f31fe366d5d71daff5d6159b658475277d7a8 | 0 | krujos/data-lifecycle-service-broker,krujos/data-lifecycle-service-broker,krujos/data-lifecycle-service-broker,krujos/data-lifecycle-service-broker | package io.pivotal.cdm.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
@Entity
public class SanitizationScript {
final static long ID = 1;
@Id
long id = ID;
private String script;
protected SanitizationScript() {
}
public SanitizationScript(final String script) {
this.setScript(script);
}
public String getScript() {
return script;
}
public void setScript(String script) {
this.script = script;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
| src/main/java/io/pivotal/cdm/model/SanitizationScript.java | package io.pivotal.cdm.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
@Entity
public class SanitizationScript {
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
long id;
private String script;
protected SanitizationScript() {
}
public SanitizationScript(final String script) {
this.setScript(script);
}
public String getScript() {
return script;
}
public void setScript(String script) {
this.script = script;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
}
| Fix bug saving script more than once
| src/main/java/io/pivotal/cdm/model/SanitizationScript.java | Fix bug saving script more than once |
|
Java | apache-2.0 | 1da2070f9cae93d2716fef93064057cb223b72d5 | 0 | arnaudsjs/YCSB-couchdb-binding | package couchdbBinding.java;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.ektorp.AttachmentInputStream;
import org.ektorp.CouchDbConnector;
import org.ektorp.CouchDbInstance;
import org.ektorp.DbInfo;
import org.ektorp.DesignDocInfo;
import org.ektorp.DocumentNotFoundException;
import org.ektorp.DocumentOperationResult;
import org.ektorp.Options;
import org.ektorp.Page;
import org.ektorp.PageRequest;
import org.ektorp.PurgeResult;
import org.ektorp.ReplicationStatus;
import org.ektorp.Revision;
import org.ektorp.StreamingChangesResult;
import org.ektorp.StreamingViewResult;
import org.ektorp.UpdateConflictException;
import org.ektorp.UpdateHandlerRequest;
import org.ektorp.ViewQuery;
import org.ektorp.ViewResult;
import org.ektorp.changes.ChangesCommand;
import org.ektorp.changes.ChangesFeed;
import org.ektorp.changes.DocumentChange;
import org.ektorp.http.HttpClient;
import org.ektorp.http.StdHttpClient;
import org.ektorp.impl.StdCouchDbInstance;
/*
* This CouchDbConnector load balances the request to
* the different nodes in the couchdb cluster.
*
* Note: Only the create, get, update and delete methods are implemented.
*
* ***********************************************************************
*
* Copyright 2013 KU Leuven Research and Development - iMinds - Distrinet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Administrative Contact: [email protected]
* Technical Contact: [email protected]
*/
/**
 * Load-balancing facade over a CouchDB cluster.
 *
 * Reads are dispatched round-robin across all nodes with fail-over: on any
 * error the next node is tried, and {@code NoNodeReacheableException} is
 * thrown once every node has been attempted. Mutating operations
 * (create/update/delete) and view queries are always routed to the first
 * node in the list (the "master"); failures there are retried up to
 * {@code connectors.size()} times against that same node.
 * {@code UpdateConflictException} and {@code DocumentNotFoundException}
 * are never retried and propagate to the caller immediately.
 *
 * Only create, get, update, delete and queryView are implemented; every
 * other {@code CouchDbConnector} operation throws
 * {@code UnsupportedOperationException}.
 */
public class LoadBalancedConnector implements CouchDbConnector{

    // One Ektorp connector per cluster node; index 0 acts as the master for mutations.
    private final List<CouchDbConnector> connectors;

    // Round-robin cursor over 'connectors' used for read operations.
    // NOTE(review): mutated without synchronization; under concurrent use the
    // distribution may be uneven, though the index always stays in range -- confirm
    // whether instances are shared between threads.
    private int nextConnector;

    /**
     * Builds one connector per node and selects the database on each.
     *
     * @param urlsOfNodesInCluster URLs of all CouchDB nodes; must be non-null
     *                             and contain at least one entry
     * @param databaseName         database to use (created if it does not exist)
     */
    public LoadBalancedConnector(List<URL> urlsOfNodesInCluster, String databaseName){
        if(urlsOfNodesInCluster == null)
            throw new IllegalArgumentException("urlsOfNodesInClusterIsNull");
        if(urlsOfNodesInCluster.isEmpty())
            throw new IllegalArgumentException("At least one node required");
        this.connectors = this.createConnectors(urlsOfNodesInCluster, databaseName);
        this.nextConnector = 0;
    }

    // Creates an Ektorp connector for every node URL, creating the database
    // on each node when it is missing.
    private List<CouchDbConnector> createConnectors(List<URL> urlsForConnectors, String databaseName){
        List<CouchDbConnector> result = new ArrayList<CouchDbConnector>();
        for(URL url : urlsForConnectors){
            HttpClient httpClient = new StdHttpClient.Builder().url(url).build();
            CouchDbInstance dbInstance = new StdCouchDbInstance(httpClient);
            // 2nd paramter true => Create database if not exists
            CouchDbConnector dbConnector = dbInstance.createConnector(databaseName, true);
            result.add(dbConnector);
        }
        return result;
    }

    // Advances the round-robin cursor, wrapping at the end of the node list.
    private void updateNextConnector(){
        this.nextConnector = (this.nextConnector+1) % this.connectors.size();
    }

    // Returns the connector for the next read and advances the cursor.
    private CouchDbConnector getConnector(){
        CouchDbConnector result = this.connectors.get(this.nextConnector);
        this.updateNextConnector();
        return result;
    }

    // All mutations go through the first node ("master"); note that the retry
    // loops below therefore re-attempt the SAME node rather than failing over.
    private CouchDbConnector getConnectorForMutationOperations(){
        return this.connectors.get(0);
    }

    // Creates a document with an explicit id on the master node, retrying
    // transient failures; conflicts propagate immediately.
    @Override
    public void create(String id, Object o) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnectorForMutationOperations().create(id, o);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: retry */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }

    // Creates a document (id assigned by CouchDB) on the master node.
    @Override
    public void create(Object o) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnectorForMutationOperations().create(o);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: retry */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }

    // Updates an existing document on the master node.
    @Override
    public void update(Object o) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnectorForMutationOperations().update(o);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: retry */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }

    // Deletes a document on the master node; returns the revision of the deletion.
    @Override
    public String delete(Object o) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnectorForMutationOperations().delete(o);
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: retry */ }
        }
        throw new NoNodeReacheableException();
    }

    // Deletes a specific document revision on the master node.
    @Override
    public String delete(String id, String revision) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnectorForMutationOperations().delete(id, revision);
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: retry */ }
        }
        throw new NoNodeReacheableException();
    }

    // ------------------------------------------------------------------
    // Operations below are not supported by this facade.
    // ------------------------------------------------------------------

    @Override
    public String copy(String sourceDocId, String targetDocId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String copy(String sourceDocId, String targetDocId,
            String targetRevision) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public PurgeResult purge(Map<String, List<String>> revisionsToPurge) {
        throw new UnsupportedOperationException("not implemented");
    }

    // Reads a document, failing over round-robin across all nodes.
    // A genuine "document not found" propagates immediately.
    @Override
    public <T> T get(Class<T> c, String id) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnector().get(c, id);
            } catch(DocumentNotFoundException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        throw new NoNodeReacheableException();
    }

    // Same as get(Class, String) but with Ektorp request options.
    @Override
    public <T> T get(Class<T> c, String id, Options options) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnector().get(c, id, options);
            } catch(DocumentNotFoundException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        throw new NoNodeReacheableException();
    }

    // ------------------------------------------------------------------
    // Operations below are not supported by this facade.
    // ------------------------------------------------------------------

    @Override
    public <T> T find(Class<T> c, String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> T find(Class<T> c, String id, Options options) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    @Deprecated
    public <T> T get(Class<T> c, String id, String rev) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    @Deprecated
    public <T> T getWithConflicts(Class<T> c, String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public boolean contains(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public InputStream getAsStream(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    @Deprecated
    public InputStream getAsStream(String id, String rev) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public InputStream getAsStream(String id, Options options) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<Revision> getRevisions(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String getCurrentRevision(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public AttachmentInputStream getAttachment(String id, String attachmentId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public AttachmentInputStream getAttachment(String id, String attachmentId,
            String revision) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String createAttachment(String docId, AttachmentInputStream data) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String createAttachment(String docId, String revision,
            AttachmentInputStream data) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String deleteAttachment(String docId, String revision,
            String attachmentId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<String> getAllDocIds() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> List<T> queryView(ViewQuery query, Class<T> type) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> Page<T> queryForPage(ViewQuery query, PageRequest pr,
            Class<T> type) {
        throw new UnsupportedOperationException("not implemented");
    }

    // Runs a view query against the master node, retrying transient
    // failures; every exception (not only connectivity errors) is retried.
    @Override
    public ViewResult queryView(ViewQuery query) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnectorForMutationOperations().queryView(query);
            } catch(Exception exc){ /* node failure: retry */ }
        }
        throw new NoNodeReacheableException();
    }

    // ------------------------------------------------------------------
    // Operations below are not supported by this facade.
    // ------------------------------------------------------------------

    @Override
    public StreamingViewResult queryForStreamingView(ViewQuery query) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public InputStream queryForStream(ViewQuery query) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void createDatabaseIfNotExists() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String getDatabaseName() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String path() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public HttpClient getConnection() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public DbInfo getDbInfo() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public DesignDocInfo getDesignDocInfo(String designDocId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void compact() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void compactViews(String designDocumentId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void cleanupViews() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public int getRevisionLimit() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void setRevisionLimit(int limit) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateFrom(String source) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateFrom(String source,
            Collection<String> docIds) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateTo(String target) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateTo(String target,
            Collection<String> docIds) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void addToBulkBuffer(Object o) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> flushBulkBuffer() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void clearBulkBuffer() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeBulk(InputStream inputStream) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeAllOrNothing(
            InputStream inputStream) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeBulk(Collection<?> objects) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeAllOrNothing(
            Collection<?> objects) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentChange> changes(ChangesCommand cmd) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public StreamingChangesResult changesAsStream(ChangesCommand cmd) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ChangesFeed changesFeed(ChangesCommand cmd) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String callUpdateHandler(String designDocID, String function,
            String docId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String callUpdateHandler(String designDocID, String function,
            String docId, Map<String, String> params) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> T callUpdateHandler(UpdateHandlerRequest req, Class<T> c) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String callUpdateHandler(UpdateHandlerRequest req) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void ensureFullCommit() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void updateMultipart(String id, InputStream stream, String boundary,
            long length, Options options) {
        throw new UnsupportedOperationException("not implemented");
    }

    // Streaming update of a document body, routed to the master node like the
    // other mutating operations.
    @Override
    public void update(String id, InputStream document, long length,
            Options options) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnectorForMutationOperations().update(id, document, length, options);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: retry */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }
}
| java/LoadBalancedConnector.java | package couchdbBinding.java;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.ektorp.AttachmentInputStream;
import org.ektorp.CouchDbConnector;
import org.ektorp.CouchDbInstance;
import org.ektorp.DbInfo;
import org.ektorp.DesignDocInfo;
import org.ektorp.DocumentNotFoundException;
import org.ektorp.DocumentOperationResult;
import org.ektorp.Options;
import org.ektorp.Page;
import org.ektorp.PageRequest;
import org.ektorp.PurgeResult;
import org.ektorp.ReplicationStatus;
import org.ektorp.Revision;
import org.ektorp.StreamingChangesResult;
import org.ektorp.StreamingViewResult;
import org.ektorp.UpdateConflictException;
import org.ektorp.UpdateHandlerRequest;
import org.ektorp.ViewQuery;
import org.ektorp.ViewResult;
import org.ektorp.changes.ChangesCommand;
import org.ektorp.changes.ChangesFeed;
import org.ektorp.changes.DocumentChange;
import org.ektorp.http.HttpClient;
import org.ektorp.http.StdHttpClient;
import org.ektorp.impl.StdCouchDbInstance;
/*
* This CouchDbConnector load balances the request to
* the different nodes in the couchdb cluster.
*
* Note: Only the create, get, update and delete methods are implemented.
*
* ***********************************************************************
*
* Copyright 2013 KU Leuven Research and Development - iMinds - Distrinet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Administrative Contact: [email protected]
* Technical Contact: [email protected]
*/
/**
 * Load-balancing facade over a CouchDB cluster.
 *
 * Every supported operation (reads AND mutations) is dispatched round-robin
 * across all nodes with fail-over: on any error the next node is tried, and
 * {@code NoNodeReacheableException} is thrown once every node has been
 * attempted. {@code UpdateConflictException} and
 * {@code DocumentNotFoundException} are never retried and propagate
 * immediately.
 *
 * NOTE(review): mutations are also round-robined across nodes; with
 * replicated CouchDB nodes this can produce conflicting revisions --
 * consider routing all writes to a single node. TODO confirm.
 *
 * Only create, get, update, delete and queryView are implemented; every
 * other {@code CouchDbConnector} operation throws
 * {@code UnsupportedOperationException}.
 */
public class LoadBalancedConnector implements CouchDbConnector{

    // One Ektorp connector per cluster node.
    private final List<CouchDbConnector> connectors;

    // Round-robin cursor over 'connectors'.
    // NOTE(review): mutated without synchronization; under concurrent use the
    // distribution may be uneven, though the index always stays in range.
    private int nextConnector;

    /**
     * Builds one connector per node and selects the database on each.
     *
     * @param urlsOfNodesInCluster URLs of all CouchDB nodes; must be non-null
     *                             and contain at least one entry
     * @param databaseName         database to use (created if it does not exist)
     */
    public LoadBalancedConnector(List<URL> urlsOfNodesInCluster, String databaseName){
        if(urlsOfNodesInCluster == null)
            throw new IllegalArgumentException("urlsOfNodesInClusterIsNull");
        if(urlsOfNodesInCluster.isEmpty())
            throw new IllegalArgumentException("At least one node required");
        this.connectors = this.createConnectors(urlsOfNodesInCluster, databaseName);
        this.nextConnector = 0;
    }

    // Creates an Ektorp connector for every node URL, creating the database
    // on each node when it is missing.
    private List<CouchDbConnector> createConnectors(List<URL> urlsForConnectors, String databaseName){
        List<CouchDbConnector> result = new ArrayList<CouchDbConnector>();
        for(URL url : urlsForConnectors){
            HttpClient httpClient = new StdHttpClient.Builder().url(url).build();
            CouchDbInstance dbInstance = new StdCouchDbInstance(httpClient);
            // 2nd paramter true => Create database if not exists
            CouchDbConnector dbConnector = dbInstance.createConnector(databaseName, true);
            result.add(dbConnector);
        }
        return result;
    }

    // Advances the round-robin cursor, wrapping at the end of the node list.
    private void updateNextConnector(){
        this.nextConnector = (this.nextConnector+1) % this.connectors.size();
    }

    // Returns the connector for the next request and advances the cursor.
    private CouchDbConnector getConnector(){
        CouchDbConnector result = this.connectors.get(this.nextConnector);
        this.updateNextConnector();
        return result;
    }

    // Creates a document with an explicit id, failing over across nodes;
    // conflicts propagate immediately.
    @Override
    public void create(String id, Object o) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnector().create(id, o);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }

    // Creates a document (id assigned by CouchDB), failing over across nodes.
    @Override
    public void create(Object o) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnector().create(o);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }

    // Updates an existing document, failing over across nodes.
    @Override
    public void update(Object o) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnector().update(o);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }

    // Deletes a document, failing over across nodes; returns the deletion revision.
    @Override
    public String delete(Object o) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnector().delete(o);
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        throw new NoNodeReacheableException();
    }

    // Deletes a specific document revision, failing over across nodes.
    @Override
    public String delete(String id, String revision) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnector().delete(id, revision);
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        throw new NoNodeReacheableException();
    }

    // ------------------------------------------------------------------
    // Operations below are not supported by this facade.
    // ------------------------------------------------------------------

    @Override
    public String copy(String sourceDocId, String targetDocId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String copy(String sourceDocId, String targetDocId,
            String targetRevision) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public PurgeResult purge(Map<String, List<String>> revisionsToPurge) {
        throw new UnsupportedOperationException("not implemented");
    }

    // Reads a document, failing over round-robin across all nodes.
    // A genuine "document not found" propagates immediately.
    @Override
    public <T> T get(Class<T> c, String id) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnector().get(c, id);
            } catch(DocumentNotFoundException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        throw new NoNodeReacheableException();
    }

    // Same as get(Class, String) but with Ektorp request options.
    @Override
    public <T> T get(Class<T> c, String id, Options options) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnector().get(c, id, options);
            } catch(DocumentNotFoundException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        throw new NoNodeReacheableException();
    }

    // ------------------------------------------------------------------
    // Operations below are not supported by this facade.
    // ------------------------------------------------------------------

    @Override
    public <T> T find(Class<T> c, String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> T find(Class<T> c, String id, Options options) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    @Deprecated
    public <T> T get(Class<T> c, String id, String rev) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    @Deprecated
    public <T> T getWithConflicts(Class<T> c, String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public boolean contains(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public InputStream getAsStream(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    @Deprecated
    public InputStream getAsStream(String id, String rev) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public InputStream getAsStream(String id, Options options) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<Revision> getRevisions(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String getCurrentRevision(String id) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public AttachmentInputStream getAttachment(String id, String attachmentId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public AttachmentInputStream getAttachment(String id, String attachmentId,
            String revision) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String createAttachment(String docId, AttachmentInputStream data) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String createAttachment(String docId, String revision,
            AttachmentInputStream data) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String deleteAttachment(String docId, String revision,
            String attachmentId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<String> getAllDocIds() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> List<T> queryView(ViewQuery query, Class<T> type) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> Page<T> queryForPage(ViewQuery query, PageRequest pr,
            Class<T> type) {
        throw new UnsupportedOperationException("not implemented");
    }

    // Runs a view query, failing over across nodes; every exception (not
    // only connectivity errors) is retried on the next node.
    @Override
    public ViewResult queryView(ViewQuery query) {
        for(int i=0; i<this.connectors.size(); i++){
            try{
                return this.getConnector().queryView(query);
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        throw new NoNodeReacheableException();
    }

    // ------------------------------------------------------------------
    // Operations below are not supported by this facade.
    // ------------------------------------------------------------------

    @Override
    public StreamingViewResult queryForStreamingView(ViewQuery query) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public InputStream queryForStream(ViewQuery query) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void createDatabaseIfNotExists() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String getDatabaseName() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String path() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public HttpClient getConnection() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public DbInfo getDbInfo() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public DesignDocInfo getDesignDocInfo(String designDocId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void compact() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void compactViews(String designDocumentId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void cleanupViews() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public int getRevisionLimit() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void setRevisionLimit(int limit) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateFrom(String source) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateFrom(String source,
            Collection<String> docIds) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateTo(String target) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ReplicationStatus replicateTo(String target,
            Collection<String> docIds) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void addToBulkBuffer(Object o) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> flushBulkBuffer() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void clearBulkBuffer() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeBulk(InputStream inputStream) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeAllOrNothing(
            InputStream inputStream) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeBulk(Collection<?> objects) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentOperationResult> executeAllOrNothing(
            Collection<?> objects) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public List<DocumentChange> changes(ChangesCommand cmd) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public StreamingChangesResult changesAsStream(ChangesCommand cmd) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public ChangesFeed changesFeed(ChangesCommand cmd) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String callUpdateHandler(String designDocID, String function,
            String docId) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String callUpdateHandler(String designDocID, String function,
            String docId, Map<String, String> params) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public <T> T callUpdateHandler(UpdateHandlerRequest req, Class<T> c) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public String callUpdateHandler(UpdateHandlerRequest req) {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void ensureFullCommit() {
        throw new UnsupportedOperationException("not implemented");
    }

    @Override
    public void updateMultipart(String id, InputStream stream, String boundary,
            long length, Options options) {
        throw new UnsupportedOperationException("not implemented");
    }

    // Streaming update of a document body, failing over across nodes like the
    // other mutating operations.
    @Override
    public void update(String id, InputStream document, long length,
            Options options) {
        boolean failed = true;
        for(int i=0; i<this.connectors.size() && failed; i++){
            try{
                this.getConnector().update(id, document, length, options);
                failed = false;
            } catch(UpdateConflictException exc){
                throw exc;
            } catch(Exception exc){ /* node failure: try next node */ }
        }
        if(failed)
            throw new NoNodeReacheableException();
    }
}
| All updates via master
| java/LoadBalancedConnector.java | All updates via master |
|
Java | apache-2.0 | 636077950c5cfd1722af90f5e808d5123625fa7b | 0 | opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open,opennetworkinglab/spring-open | package net.onrc.onos.core.devicemanager;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.IUpdate;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.util.MACAddress;
import net.onrc.onos.core.datagrid.IDatagridService;
import net.onrc.onos.core.datagrid.IEventChannel;
import net.onrc.onos.core.datagrid.IEventChannelListener;
import net.onrc.onos.core.packet.ARP;
import net.onrc.onos.core.packet.DHCP;
import net.onrc.onos.core.packet.Ethernet;
import net.onrc.onos.core.packet.IPv4;
import net.onrc.onos.core.packet.UDP;
import net.onrc.onos.core.topology.INetworkGraphService;
import net.onrc.onos.core.topology.NetworkGraph;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.openflow.protocol.OFType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OnosDeviceManager implements IFloodlightModule,
IOFMessageListener,
IOnosDeviceService,
IEventChannelListener<Long, OnosDevice> {
private static final Logger log = LoggerFactory.getLogger(OnosDeviceManager.class);
private static final int CLEANUP_SECOND = 60 * 60;
private static final int AGEING_MILLSEC = 60 * 60 * 1000;
private CopyOnWriteArrayList<IOnosDeviceListener> deviceListeners;
private IFloodlightProviderService floodlightProvider;
private static final ScheduledExecutorService EXECUTOR_SERVICE = Executors.newSingleThreadScheduledExecutor();
private IDatagridService datagrid;
private IEventChannel<Long, OnosDevice> eventChannel;
private static final String DEVICE_CHANNEL_NAME = "onos.device";
private Map<Long, OnosDevice> mapDevice = new ConcurrentHashMap<Long, OnosDevice>();
private INetworkGraphService networkGraphService;
private NetworkGraph networkGraph;
// Kind of device event dispatched to IOnosDeviceListeners.
// Note: OnosDeviceUpdate.dispatch() currently handles only ADD and DELETE;
// UPDATE is declared but not dispatched.
public enum OnosDeviceUpdateType {
    ADD, DELETE, UPDATE;
}
/**
 * Floodlight IUpdate that fans a single device event out to every
 * registered IOnosDeviceListener when dispatched on the update thread.
 */
private class OnosDeviceUpdate implements IUpdate {
    private final OnosDevice device;
    private final OnosDeviceUpdateType type;

    public OnosDeviceUpdate(OnosDevice device, OnosDeviceUpdateType type) {
        this.device = device;
        this.type = type;
    }

    @Override
    public void dispatch() {
        switch (type) {
        case ADD:
            for (IOnosDeviceListener listener : deviceListeners) {
                listener.onosDeviceAdded(device);
            }
            break;
        case DELETE:
            for (IOnosDeviceListener listener : deviceListeners) {
                listener.onosDeviceRemoved(device);
            }
            break;
        default:
            // UPDATE events carry no listener callback at present.
            break;
        }
    }
}
@Override
public String getName() {
    // Listener name used by Floodlight's message-ordering framework.
    return "onosdevicemanager";
}
/**
 * Declares which listeners must process a message before this module.
 * Link discovery must consume LLDP PACKET_INs first, otherwise LLDP frames
 * would be misread here as device traffic.
 */
@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
    if (type != OFType.PACKET_IN) {
        return false;
    }
    return "linkdiscovery".equals(name);
}
/**
 * Declares which listeners must run after this module: proxy ARP and
 * forwarding need the device information learned here.
 */
@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
    boolean isPacketIn = (type == OFType.PACKET_IN);
    boolean runsAfterUs =
            "proxyarpmanager".equals(name) || "onosforwarding".equals(name);
    return isPacketIn && runsAfterUs;
}
/**
 * OpenFlow message hook: forwards PACKET_IN messages to device learning and
 * lets every other message type pass through untouched.
 */
@Override
public Command receive(IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
    // Only PACKET_IN messages can carry host information.
    if (!msg.getType().equals(OFType.PACKET_IN)) {
        return Command.CONTINUE;
    }
    Ethernet payload = IFloodlightProviderService.bcStore
            .get(cntx, IFloodlightProviderService.CONTEXT_PI_PAYLOAD);
    return processPacketIn(sw, (OFPacketIn) msg, payload);
}
/**
 * Learns (or refreshes) the packet's source device.
 *
 * Flow: derive the device from the packet's source MAC and attachment
 * point; stop the pipeline if nothing can be derived; when an equivalent
 * device is already cached, only refresh its last-seen timestamp; refuse
 * to learn a device on a port that carries an infrastructure link; and
 * otherwise publish the new device.
 *
 * Bug fix: the four log statements were guarded by isTraceEnabled() but
 * called log.debug(), so they were silently dropped when the level was
 * DEBUG and emitted at the wrong level when it was TRACE. They now use
 * log.trace() to match their guards.
 *
 * @param sw  switch that generated the PACKET_IN
 * @param pi  the PACKET_IN message
 * @param eth decoded Ethernet payload of the packet
 * @return STOP when no device could be derived, CONTINUE otherwise
 */
private Command processPacketIn(IOFSwitch sw, OFPacketIn pi, Ethernet eth) {
    long dpid = sw.getId();
    short portId = pi.getInPort();
    Long mac = eth.getSourceMAC().toLong();

    OnosDevice srcDevice =
            getSourceDeviceFromPacket(eth, dpid, portId);
    if (srcDevice == null) {
        return Command.STOP;
    }

    //We check if it is the same device in datagrid to suppress the device update
    OnosDevice exDev = mapDevice.get(mac);
    if (exDev != null) {
        if (exDev.equals(srcDevice)) {
            //There is the same existing device. Update only ActiveSince time.
            exDev.setLastSeenTimestamp(new Date());
            if (log.isTraceEnabled()) {
                log.trace("In the datagrid, there is the same device."
                        + "Only update last seen time. dpid {}, port {}, mac {}, ip {}, lastSeenTime {}",
                        dpid, portId, srcDevice.getMacAddress(), srcDevice.getIpv4Address(), srcDevice.getLastSeenTimestamp().getTime());
            }
            return Command.CONTINUE;
        } else if (srcDevice.getIpv4Address() == null &&
                exDev.getSwitchDPID().equals(srcDevice.getSwitchDPID()) &&
                exDev.getSwitchPort() == srcDevice.getSwitchPort()) {
            //Vlan should be handled based on the Onos spec. Until then, don't handle it.
            //Device attachment point and mac address are the same
            //but the packet does not have an ip address.
            exDev.setLastSeenTimestamp(new Date());
            if (log.isTraceEnabled()) {
                log.trace("In the datagrid, there is the same device with no ip."
                        + "Keep ip and update last seen time. dpid {}, port {}, mac {}, ip {} lastSeenTime {}",
                        dpid, portId, srcDevice.getMacAddress(), exDev.getIpv4Address(), srcDevice.getLastSeenTimestamp().getTime());
            }
            return Command.CONTINUE;
        }
    }

    //If the switch port we try to attach a new device already has a link, then stop adding device
    if (networkGraph.getLink(dpid, (long) portId) != null) {
        if (log.isTraceEnabled()) {
            log.trace("Stop adding OnosDevice {} due to there is a link to: dpid {} port {}",
                    srcDevice.getMacAddress(), dpid, portId);
        }
        return Command.CONTINUE;
    }

    addOnosDevice(mac, srcDevice);

    if (log.isTraceEnabled()) {
        log.trace("Add device info in the set. dpid {}, port {}, mac {}, ip {}, lastSeenTime {}",
                dpid, portId, srcDevice.getMacAddress(), srcDevice.getIpv4Address(), srcDevice.getLastSeenTimestamp().getTime());
    }
    return Command.CONTINUE;
}
//Thread to delete devices periodically.
//Remove all devices from the map first and then finally delete devices from the DB.
private class CleanDevice implements Runnable {
@Override
public void run() {
log.debug("called CleanDevice");
try {
Set<OnosDevice> deleteSet = new HashSet<OnosDevice>();
for (OnosDevice dev : mapDevice.values()) {
long now = new Date().getTime();
if ((now - dev.getLastSeenTimestamp().getTime() > AGEING_MILLSEC)) {
if (log.isTraceEnabled()) {
log.debug("Remove device info in the datagrid. dpid {}, port {}, mac {}, ip {}, lastSeenTime {}, diff {}",
dev.getSwitchDPID(), dev.getSwitchPort(), dev.getMacAddress(), dev.getIpv4Address(),
dev.getLastSeenTimestamp().getTime(), now - dev.getLastSeenTimestamp().getTime());
}
deleteSet.add(dev);
}
}
for (OnosDevice dev : deleteSet) {
deleteOnosDevice(dev);
}
} catch (Exception e) {
log.error("Error:", e);
}
}
}
    /**
     * Get IP address from packet if the packet is either an ARP
     * or a DHCP packet
     *
     * @param eth the received Ethernet frame
     * @param dlAddr source MAC of the frame; an ARP sender address is only
     *        trusted when it matches this MAC
     * @return the IPv4 source address, or 0 when none could be extracted
     */
    private int getSrcNwAddr(Ethernet eth, long dlAddr) {
        if (eth.getPayload() instanceof ARP) {
            ARP arp = (ARP) eth.getPayload();
            if ((arp.getProtocolType() == ARP.PROTO_TYPE_IP) &&
                    (Ethernet.toLong(arp.getSenderHardwareAddress()) == dlAddr)) {
                return IPv4.toIPv4Address(arp.getSenderProtocolAddress());
            }
        } else if (eth.getPayload() instanceof IPv4) {
            IPv4 ipv4 = (IPv4) eth.getPayload();
            if (ipv4.getPayload() instanceof UDP) {
                UDP udp = (UDP) ipv4.getPayload();
                if (udp.getPayload() instanceof DHCP) {
                    DHCP dhcp = (DHCP) udp.getPayload();
                    // Only DHCP replies carry a usable source address here.
                    if (dhcp.getOpCode() == DHCP.OPCODE_REPLY) {
                        return ipv4.getSourceAddress();
                    }
                }
            }
        }
        return 0;
    }
    /**
     * Parse an entity from an {@link Ethernet} packet.
     *
     * @param eth the packet to parse
     * @param swdpid DPID of the switch on which the packet arrived
     * @param port switch port on which the packet arrived
     * @return the entity from the packet, or null when the source address
     *         is broadcast/multicast
     */
    private OnosDevice getSourceDeviceFromPacket(Ethernet eth,
            long swdpid,
            short port) {
        byte[] dlAddrArr = eth.getSourceMACAddress();
        long dlAddr = Ethernet.toLong(dlAddrArr);
        // Ignore broadcast/multicast source
        if ((dlAddrArr[0] & 0x1) != 0)
            return null;
        short vlan = eth.getVlanID();
        int nwSrc = getSrcNwAddr(eth, dlAddr);
        // A negative VLAN id (untagged) and a zero IP are stored as null.
        return new OnosDevice(MACAddress.valueOf(dlAddr),
                ((vlan >= 0) ? vlan : null),
                ((nwSrc != 0) ? nwSrc : null),
                swdpid,
                port,
                new Date());
    }
    /** This module exposes the {@code IOnosDeviceService} API. */
    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleServices() {
        List<Class<? extends IFloodlightService>> services =
                new ArrayList<Class<? extends IFloodlightService>>();
        services.add(IOnosDeviceService.class);
        return services;
    }
    /** This instance itself implements the service it advertises. */
    @Override
    public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
        Map<Class<? extends IFloodlightService>, IFloodlightService> impls =
                new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
        impls.put(IOnosDeviceService.class, this);
        return impls;
    }
    /** Services this module needs resolved before init() runs. */
    @Override
    public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
        List<Class<? extends IFloodlightService>> dependencies =
                new ArrayList<Class<? extends IFloodlightService>>();
        dependencies.add(IFloodlightProviderService.class);
        dependencies.add(INetworkGraphService.class);
        dependencies.add(IDatagridService.class);
        return dependencies;
    }
    /** Resolve dependencies and schedule the periodic ageing task. */
    @Override
    public void init(FloodlightModuleContext context)
            throws FloodlightModuleException {
        floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
        // First sweep after 30s, then every CLEANUP_SECOND seconds.
        EXECUTOR_SERVICE.scheduleAtFixedRate(new CleanDevice(), 30, CLEANUP_SECOND, TimeUnit.SECONDS);
        deviceListeners = new CopyOnWriteArrayList<IOnosDeviceListener>();
        datagrid = context.getServiceImpl(IDatagridService.class);
        networkGraphService = context.getServiceImpl(INetworkGraphService.class);
        networkGraph = networkGraphService.getNetworkGraph();
    }
    /** Start listening for PACKET_INs and join the shared device channel. */
    @Override
    public void startUp(FloodlightModuleContext context) {
        floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this);
        // entryAdded/Removed/Updated below keep mapDevice in sync with
        // this channel.
        eventChannel = datagrid.addListener(DEVICE_CHANNEL_NAME, this,
                Long.class,
                OnosDevice.class);
    }
    /**
     * Remove a device from the shared datagrid channel and notify
     * listeners through the floodlight update queue.
     */
    @Override
    public void deleteOnosDevice(OnosDevice dev) {
        Long mac = dev.getMacAddress().toLong();
        eventChannel.removeEntry(mac);
        floodlightProvider.publishUpdate(new OnosDeviceUpdate(dev, OnosDeviceUpdateType.DELETE));
    }
@Override
public void deleteOnosDeviceByMac(MACAddress mac) {
OnosDevice deleteDevice = mapDevice.get(mac.toLong());
deleteOnosDevice(deleteDevice);
}
    /**
     * Publish a device to the shared datagrid channel and notify
     * listeners through the floodlight update queue.
     */
    @Override
    public void addOnosDevice(Long mac, OnosDevice dev) {
        eventChannel.addEntry(mac, dev);
        floodlightProvider.publishUpdate(new OnosDeviceUpdate(dev, OnosDeviceUpdateType.ADD));
    }
    // --- IEventChannelListener: mirror datagrid entries into mapDevice ---
    @Override
    public void entryAdded(OnosDevice dev) {
        Long mac = dev.getMacAddress().toLong();
        mapDevice.put(mac, dev);
        log.debug("Device added: device mac {}", mac);
    }
    @Override
    public void entryRemoved(OnosDevice dev) {
        Long mac = dev.getMacAddress().toLong();
        mapDevice.remove(mac);
        log.debug("Device removed: device mac {}", mac);
    }
    @Override
    public void entryUpdated(OnosDevice dev) {
        Long mac = dev.getMacAddress().toLong();
        mapDevice.put(mac, dev);
        log.debug("Device updated: device mac {}", mac);
    }
    // --- listener registry management ---
    @Override
    public void addOnosDeviceListener(IOnosDeviceListener listener) {
        deviceListeners.add(listener);
    }
    @Override
    public void deleteOnosDeviceListener(IOnosDeviceListener listener) {
        deviceListeners.remove(listener);
    }
}
| src/main/java/net/onrc/onos/core/devicemanager/OnosDeviceManager.java | package net.onrc.onos.core.devicemanager;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IFloodlightProviderService;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.IUpdate;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.util.MACAddress;
import net.onrc.onos.core.datagrid.IDatagridService;
import net.onrc.onos.core.datagrid.IEventChannel;
import net.onrc.onos.core.datagrid.IEventChannelListener;
import net.onrc.onos.core.packet.ARP;
import net.onrc.onos.core.packet.DHCP;
import net.onrc.onos.core.packet.Ethernet;
import net.onrc.onos.core.packet.IPv4;
import net.onrc.onos.core.packet.UDP;
import net.onrc.onos.core.topology.INetworkGraphService;
import net.onrc.onos.core.topology.NetworkGraph;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketIn;
import org.openflow.protocol.OFType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OnosDeviceManager implements IFloodlightModule,
IOFMessageListener,
IOnosDeviceService,
IEventChannelListener<Long, OnosDevice> {
private static final Logger log = LoggerFactory.getLogger(OnosDeviceManager.class);
private static final int CLEANUP_SECOND = 60 * 60;
private static final int AGEING_MILLSEC = 60 * 60 * 1000;
private CopyOnWriteArrayList<IOnosDeviceListener> deviceListeners;
private IFloodlightProviderService floodlightProvider;
private static final ScheduledExecutorService EXECUTOR_SERVICE = Executors.newSingleThreadScheduledExecutor();
private IDatagridService datagrid;
private IEventChannel<Long, OnosDevice> eventChannel;
private static final String DEVICE_CHANNEL_NAME = "onos.device";
private Map<Long, OnosDevice> mapDevice = new ConcurrentHashMap<Long, OnosDevice>();
private INetworkGraphService networkGraphService;
private NetworkGraph networkGraph;
public enum OnosDeviceUpdateType {
ADD, DELETE, UPDATE;
}
private class OnosDeviceUpdate implements IUpdate {
private OnosDevice device;
private OnosDeviceUpdateType type;
public OnosDeviceUpdate(OnosDevice device, OnosDeviceUpdateType type) {
this.device = device;
this.type = type;
}
@Override
public void dispatch() {
if (type == OnosDeviceUpdateType.ADD) {
for (IOnosDeviceListener listener : deviceListeners) {
listener.onosDeviceAdded(device);
}
} else if (type == OnosDeviceUpdateType.DELETE) {
for (IOnosDeviceListener listener : deviceListeners) {
listener.onosDeviceRemoved(device);
}
}
}
}
@Override
public String getName() {
return "onosdevicemanager";
}
@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
// We want link discovery to consume LLDP first otherwise we'll
// end up reading bad device info from LLDP packets
return type == OFType.PACKET_IN && "linkdiscovery".equals(name);
}
@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
return type == OFType.PACKET_IN &&
("proxyarpmanager".equals(name) || "onosforwarding".equals(name));
}
@Override
public Command receive(IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
if (msg.getType().equals(OFType.PACKET_IN)) {
OFPacketIn pi = (OFPacketIn) msg;
Ethernet eth = IFloodlightProviderService.bcStore.
get(cntx, IFloodlightProviderService.CONTEXT_PI_PAYLOAD);
return processPacketIn(sw, pi, eth);
}
return Command.CONTINUE;
}
private Command processPacketIn(IOFSwitch sw, OFPacketIn pi, Ethernet eth) {
long dpid = sw.getId();
short portId = pi.getInPort();
Long mac = eth.getSourceMAC().toLong();
OnosDevice srcDevice =
getSourceDeviceFromPacket(eth, dpid, portId);
if (srcDevice == null) {
return Command.STOP;
}
//We check if it is the same device in datagrid to suppress the device update
OnosDevice exDev = mapDevice.get(mac);
if (exDev != null) {
if (exDev.equals(srcDevice)) {
//There is the same existing device. Update only ActiveSince time.
exDev.setLastSeenTimestamp(new Date());
if (log.isTraceEnabled()) {
log.debug("In the datagrid, there is the same device."
+ "Only update last seen time. dpid {}, port {}, mac {}, ip {}, lastSeenTime {}",
dpid, portId, srcDevice.getMacAddress(), srcDevice.getIpv4Address(), srcDevice.getLastSeenTimestamp().getTime());
}
return Command.CONTINUE;
} else if (srcDevice.getIpv4Address() == null &&
exDev.getSwitchDPID().equals(srcDevice.getSwitchDPID()) &&
exDev.getSwitchPort() == srcDevice.getSwitchPort()) {
//Vlan should be handled based on the Onos spec. Until then, don't handle it.
//Device attachment point and mac address are the same
//but the packet does not have an ip address.
exDev.setLastSeenTimestamp(new Date());
if (log.isTraceEnabled()) {
log.debug("In the datagrid, there is the same device with no ip."
+ "Keep ip and update last seen time. dpid {}, port {}, mac {}, ip {} lastSeenTime {}",
dpid, portId, srcDevice.getMacAddress(), exDev.getIpv4Address(), srcDevice.getLastSeenTimestamp().getTime());
}
return Command.CONTINUE;
}
}
//If the switch port we try to attach a new device already has a link, then stop adding device
if (networkGraph.getLink(dpid, (long) portId) != null) {
if (log.isTraceEnabled()) {
log.debug("Stop adding OnosDevice {} due to there is a link to: dpid {} port {}",
srcDevice.getMacAddress(), dpid, portId);
}
return Command.CONTINUE;
}
addOnosDevice(mac, srcDevice);
if (log.isTraceEnabled()) {
log.debug("Add device info in the set. dpid {}, port {}, mac {}, ip {}, lastSeenTime {}",
dpid, portId, srcDevice.getMacAddress(), srcDevice.getIpv4Address(), srcDevice.getLastSeenTimestamp().getTime());
}
return Command.CONTINUE;
}
//Thread to delete devices periodically.
//Remove all devices from the map first and then finally delete devices from the DB.
private class CleanDevice implements Runnable {
@Override
public void run() {
log.debug("called CleanDevice");
try {
Set<OnosDevice> deleteSet = new HashSet<OnosDevice>();
for (OnosDevice dev : mapDevice.values()) {
long now = new Date().getTime();
if ((now - dev.getLastSeenTimestamp().getTime() > AGEING_MILLSEC)) {
if (log.isTraceEnabled()) {
log.debug("Remove device info in the datagrid. dpid {}, port {}, mac {}, ip {}, lastSeenTime {}, diff {}",
dev.getSwitchDPID(), dev.getSwitchPort(), dev.getMacAddress(), dev.getIpv4Address(),
dev.getLastSeenTimestamp().getTime(), now - dev.getLastSeenTimestamp().getTime());
}
deleteSet.add(dev);
}
}
for (OnosDevice dev : deleteSet) {
deleteOnosDevice(dev);
}
} catch (Exception e) {
log.error("Error:", e);
}
}
}
/**
* Get IP address from packet if the packet is either an ARP
* or a DHCP packet
*
* @param eth
* @param dlAddr
* @return
*/
private int getSrcNwAddr(Ethernet eth, long dlAddr) {
if (eth.getPayload() instanceof ARP) {
ARP arp = (ARP) eth.getPayload();
if ((arp.getProtocolType() == ARP.PROTO_TYPE_IP) &&
(Ethernet.toLong(arp.getSenderHardwareAddress()) == dlAddr)) {
return IPv4.toIPv4Address(arp.getSenderProtocolAddress());
}
} else if (eth.getPayload() instanceof IPv4) {
IPv4 ipv4 = (IPv4) eth.getPayload();
if (ipv4.getPayload() instanceof UDP) {
UDP udp = (UDP) ipv4.getPayload();
if (udp.getPayload() instanceof DHCP) {
DHCP dhcp = (DHCP) udp.getPayload();
if (dhcp.getOpCode() == DHCP.OPCODE_REPLY) {
return ipv4.getSourceAddress();
}
}
}
}
return 0;
}
/**
* Parse an entity from an {@link Ethernet} packet.
*
* @param eth the packet to parse
* @param sw the switch on which the packet arrived
* @param pi the original packetin
* @return the entity from the packet
*/
private OnosDevice getSourceDeviceFromPacket(Ethernet eth,
long swdpid,
short port) {
byte[] dlAddrArr = eth.getSourceMACAddress();
long dlAddr = Ethernet.toLong(dlAddrArr);
// Ignore broadcast/multicast source
if ((dlAddrArr[0] & 0x1) != 0)
return null;
short vlan = eth.getVlanID();
int nwSrc = getSrcNwAddr(eth, dlAddr);
return new OnosDevice(MACAddress.valueOf(dlAddr),
((vlan >= 0) ? vlan : null),
((nwSrc != 0) ? nwSrc : null),
swdpid,
port,
new Date());
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleServices() {
List<Class<? extends IFloodlightService>> services =
new ArrayList<Class<? extends IFloodlightService>>();
services.add(IOnosDeviceService.class);
return services;
}
@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
Map<Class<? extends IFloodlightService>, IFloodlightService> impls =
new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
impls.put(IOnosDeviceService.class, this);
return impls;
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
List<Class<? extends IFloodlightService>> dependencies =
new ArrayList<Class<? extends IFloodlightService>>();
dependencies.add(IFloodlightProviderService.class);
dependencies.add(INetworkGraphService.class);
dependencies.add(IDatagridService.class);
return dependencies;
}
@Override
public void init(FloodlightModuleContext context)
throws FloodlightModuleException {
floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
EXECUTOR_SERVICE.scheduleAtFixedRate(new CleanDevice(), 30, CLEANUP_SECOND, TimeUnit.SECONDS);
deviceListeners = new CopyOnWriteArrayList<IOnosDeviceListener>();
datagrid = context.getServiceImpl(IDatagridService.class);
networkGraphService = context.getServiceImpl(INetworkGraphService.class);
networkGraph = networkGraphService.getNetworkGraph();
}
@Override
public void startUp(FloodlightModuleContext context) {
floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this);
eventChannel = datagrid.addListener(DEVICE_CHANNEL_NAME, this,
Long.class,
OnosDevice.class);
}
@Override
public void deleteOnosDevice(OnosDevice dev) {
Long mac = dev.getMacAddress().toLong();
eventChannel.removeEntry(mac);
floodlightProvider.publishUpdate(new OnosDeviceUpdate(dev, OnosDeviceUpdateType.DELETE));
}
@Override
public void deleteOnosDeviceByMac(MACAddress mac) {
OnosDevice deleteDevice = mapDevice.get(mac);
deleteOnosDevice(deleteDevice);
}
@Override
public void addOnosDevice(Long mac, OnosDevice dev) {
eventChannel.addEntry(mac, dev);
floodlightProvider.publishUpdate(new OnosDeviceUpdate(dev, OnosDeviceUpdateType.ADD));
}
@Override
public void entryAdded(OnosDevice dev) {
Long mac = dev.getMacAddress().toLong();
mapDevice.put(mac, dev);
log.debug("Device added: device mac {}", mac);
}
@Override
public void entryRemoved(OnosDevice dev) {
Long mac = dev.getMacAddress().toLong();
mapDevice.remove(mac);
log.debug("Device removed: device mac {}", mac);
}
@Override
public void entryUpdated(OnosDevice dev) {
Long mac = dev.getMacAddress().toLong();
mapDevice.put(mac, dev);
log.debug("Device updated: device mac {}", mac);
}
@Override
public void addOnosDeviceListener(IOnosDeviceListener listener) {
deviceListeners.add(listener);
}
@Override
public void deleteOnosDeviceListener(IOnosDeviceListener listener) {
deviceListeners.remove(listener);
}
}
| Fix issues found by FindBugs: GC_UNRELATED_TYPES
http://findbugs.sourceforge.net/bugDescriptions.html#GC_UNRELATED_TYPES
Change-Id: I744086323ed6c2b03d9e2853459ab1ae03fd87b2
| src/main/java/net/onrc/onos/core/devicemanager/OnosDeviceManager.java | Fix issues found by FindBugs: GC_UNRELATED_TYPES http://findbugs.sourceforge.net/bugDescriptions.html#GC_UNRELATED_TYPES |
|
Java | apache-2.0 | 11c1ddde45620ea20c5fdfd5be7a5806a2ef5c50 | 0 | joansmith/supernode,bitsofproof/supernode | package com.bitsofproof.supernode.api;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
public class ScriptFormat
{
// unfortunatelly unused: https://bitcointalk.org/index.php?topic=120836.0
public static final int SIGHASH_ALL = 1;
public static final int SIGHASH_NONE = 2;
public static final int SIGHASH_SINGLE = 3;
public static final int SIGHASH_ANYONECANPAY = 0x80;
public static enum Opcode
{
OP_FALSE (0), OP_PUSH1 (1), OP_PUSH2 (2), OP_PUSH3 (3), OP_PUSH4 (4), OP_PUSH5 (5), OP_PUSH6 (6), OP_PUSH7 (7), OP_PUSH8 (8), OP_PUSH9 (9), OP_PUSH10 (
10), OP_PUSH11 (11), OP_PUSH12 (12), OP_PUSH13 (13), OP_PUSH14 (14), OP_PUSH15 (15), OP_PUSH16 (16), OP_PUSH17 (17), OP_PUSH18 (18), OP_PUSH19 (
19), OP_PUSH20 (20), OP_PUSH21 (21), OP_PUSH22 (22), OP_PUSH23 (23), OP_PUSH24 (24), OP_PUSH25 (25), OP_PUSH26 (26), OP_PUSH27 (27), OP_PUSH28 (
28), OP_PUSH29 (29), OP_PUSH30 (30), OP_PUSH31 (31), OP_PUSH32 (32), OP_PUSH33 (33), OP_PUSH34 (34), OP_PUSH35 (35), OP_PUSH36 (36), OP_PUSH37 (
37), OP_PUSH38 (38), OP_PUSH39 (39), OP_PUSH40 (40), OP_PUSH41 (41), OP_PUSH42 (42), OP_PUSH43 (43), OP_PUSH44 (44), OP_PUSH45 (45), OP_PUSH46 (
46), OP_PUSH47 (47), OP_PUSH48 (48), OP_PUSH49 (49), OP_PUSH50 (50), OP_PUSH51 (51), OP_PUSH52 (52), OP_PUSH53 (53), OP_PUSH54 (54), OP_PUSH55 (
55), OP_PUSH56 (56), OP_PUSH57 (57), OP_PUSH58 (58), OP_PUSH59 (59), OP_PUSH60 (60), OP_PUSH61 (61), OP_PUSH62 (62), OP_PUSH63 (63), OP_PUSH64 (
64), OP_PUSH65 (65), OP_PUSH66 (66), OP_PUSH67 (67), OP_PUSH68 (68), OP_PUSH69 (69), OP_PUSH70 (70), OP_PUSH71 (71), OP_PUSH72 (72), OP_PUSH73 (
73), OP_PUSH74 (74), OP_PUSH75 (75),
OP_PUSHDATA1 (76), OP_PUSHDATA2 (77), OP_PUSHDATA4 (78), OP_1NEGATE (79),
OP_RESERVED (80),
OP_1 (81), OP_2 (82), OP_3 (83), OP_4 (84), OP_5 (85), OP_6 (86), OP_7 (87), OP_8 (88), OP_9 (89), OP_10 (90), OP_11 (91), OP_12 (92), OP_13 (93),
OP_14 (94), OP_15 (95), OP_16 (96),
OP_NOP (97), OP_VER (98), OP_IF (99), OP_NOTIF (100), OP_VERIF (101), OP_VERNOTIF (102),
OP_ELSE (103), OP_ENDIF (104), OP_VERIFY (105), OP_RETURN (106),
OP_TOALTSTACK (107), OP_FROMALTSTACK (108), OP_2DROP (109), OP_2DUP (110), OP_3DUP (111), OP_2OVER (112), OP_2ROT (113), OP_2SWAP (114),
OP_IFDUP (115), OP_DEPTH (116), OP_DROP (117), OP_DUP (118), OP_NIP (119), OP_OVER (120), OP_PICK (121), OP_ROLL (122), OP_ROT (123), OP_SWAP (124),
OP_TUCK (125),
OP_CAT (126), OP_SUBSTR (127), OP_LEFT (128), OP_RIGHT (129), OP_SIZE (130), OP_INVERT (131), OP_AND (132), OP_OR (133), OP_XOR (134),
OP_EQUAL (135), OP_EQUALVERIFY (136),
OP_RESERVED1 (137), OP_RESERVED2 (138),
OP_1ADD (139), // 0x8b in out 1 is added to the input.
OP_1SUB (140), // 0x8c in out 1 is subtracted from the input.
OP_2MUL (141), // 0x8d in out The input is multiplied by 2. Currently
// disabled.
OP_2DIV (142), // 0x8e in out The input is divided by 2. Currently
// disabled.
OP_NEGATE (143), // 0x8f in out The sign of the input is flipped.
OP_ABS (144), // 0x90 in out The input is made positive.
OP_NOT (145), // 0x91 in out If the input is 0 or 1, it is flipped.
// Otherwise the output will be 0.
OP_0NOTEQUAL (146), // 0x92 in out Returns 0 if the input is 0. 1
// otherwise.
OP_ADD (147), // 0x93 a b out a is added to b.
OP_SUB (148), // 0x94 a b out b is subtracted from a.
OP_MUL (149), // 0x95 a b out a is multiplied by b. Currently disabled.
OP_DIV (150), // 0x96 a b out a is divided by b. Currently disabled.
OP_MOD (151), // 0x97 a b out Returns the remainder after dividing a by
// b. Currently disabled.
OP_LSHIFT (152), // 0x98 a b out Shifts a left b bits, preserving sign.
// Currently disabled.
OP_RSHIFT (153), // 0x99 a b out Shifts a right b bits, preserving sign.
// Currently disabled.
OP_BOOLAND (154), // 0x9a a b out If both a and b are not 0, the output
// is 1. Otherwise 0.
OP_BOOLOR (155), // 0x9b a b out If a or b is not 0, the output is 1.
// Otherwise 0.
OP_NUMEQUAL (156), // 0x9c a b out Returns 1 if the numbers are equal, 0
// otherwise.
OP_NUMEQUALVERIFY (157), // 0x9d a b out Same as OP_NUMEQUAL, but runs
// OP_VERIFY afterward.
OP_NUMNOTEQUAL (158), // 0x9e a b out Returns 1 if the numbers are not
// equal, 0 otherwise.
OP_LESSTHAN (159), // 0x9f a b out Returns 1 if a is less than b, 0
// otherwise.
OP_GREATERTHAN (160), // 0xa0 a b out Returns 1 if a is greater than b,
// 0
// otherwise.
OP_LESSTHANOREQUAL (161), // 0xa1 a b out Returns 1 if a is less than or
// equal to b, 0 otherwise.
OP_GREATERTHANOREQUAL (162), // 0xa2 a b out Returns 1 if a is greater
// than or equal to b, 0 otherwise.
OP_MIN (163), // 0xa3 a b out Returns the smaller of a and b.
OP_MAX (164), // 0xa4 a b out Returns the larger of a and b.
OP_WITHIN (165), // 0xa5 x min max out Returns 1 if x is within the
// specified range (left-inclusive), 0 otherwise.
OP_RIPEMD160 (166), // 0xa6 in hash The input is hashed using
// RIPEMD-160.
OP_SHA1 (167), // 0xa7 in hash The input is hashed using SHA-1.
OP_SHA256 (168), // 0xa8 in hash The input is hashed using SHA-256.
OP_HASH160 (169), // 0xa9 in hash The input is hashed twice: first with
// SHA-256 and then with RIPEMD-160.
OP_HASH256 (170), // 0xaa in hash The input is hashed two times with
// SHA-256.
OP_CODESEPARATOR (171), // 0xab Nothing Nothing All of the signature
// checking words will only match signatures to
// the data after the most recently-executed
// OP_CODESEPARATOR.
OP_CHECKSIG (172), // 0xac sig pubkey True / false The entire
// transaction's outputs, inputs, and script (from
// the most recently-executed OP_CODESEPARATOR to
// the end) are hashed. The signature used by
// OP_CHECKSIG must be a valid signature for this
// hash and public key. If it is, 1 is returned, 0
// otherwise.
OP_CHECKSIGVERIFY (173), // 0xad sig pubkey True / false Same as
// OP_CHECKSIG, but OP_VERIFY is executed
// afterward.
OP_CHECKMULTISIG (174), // 0xae x sig1 sig2 ... <number of signatures>
// pub1 pub2 <number of public keys> True /
// False For each signature and public key pair,
// OP_CHECKSIG is executed. If more public keys
// than signatures are listed, some key/sig
// pairs can fail. All signatures need to match
// a public key. If all signatures are valid, 1
// is returned, 0 otherwise. Due to a bug, one
// extra unused value is removed from the stack.
OP_CHECKMULTISIGVERIFY (175), // 0xaf x sig1 sig2 ... <number of
// signatures> pub1 pub2 ... <number of
// public keys> True / False Same as
// OP_CHECKMULTISIG, but OP_VERIFY is
// executed afterward.
OP_NOP1 (176), OP_NOP2 (177), OP_NOP3 (178), OP_NOP4 (179), OP_NOP5 (180), OP_NOP6 (181), OP_NOP7 (182), OP_NOP8 (183), OP_NOP9 (184), OP_NOP10 (185);
final int o;
Opcode (int n)
{
this.o = n;
}
}
public static class Token
{
public Opcode op;
public byte[] data;
public Token ()
{
}
public Token (Opcode op)
{
this.op = op;
data = null;
}
}
	/**
	 * Byte cursor over a serialized script; multi-byte integers are read
	 * little-endian, as in the Bitcoin wire format.
	 */
	public static class Reader
	{
		private final byte[] bytes;
		int cursor;
		public Reader (byte[] script)
		{
			this.bytes = script;
			this.cursor = 0;
		}
		// true once every byte has been consumed
		public boolean eof ()
		{
			return cursor == bytes.length;
		}
		// read and consume the next n bytes
		public byte[] readBytes (int n)
		{
			byte[] b = new byte[n];
			System.arraycopy (bytes, cursor, b, 0, n);
			cursor += n;
			return b;
		}
		public void skipBytes (int n)
		{
			cursor += n;
		}
		// next byte as an unsigned value
		public int readByte ()
		{
			return bytes[cursor++] & 0xff;
		}
		// 2-byte little-endian unsigned integer
		public long readInt16 ()
		{
			long value = ((bytes[cursor] & 0xFFL) << 0) | ((bytes[cursor + 1] & 0xFFL) << 8);
			cursor += 2;
			return value;
		}
		// 4-byte little-endian unsigned integer
		public long readInt32 ()
		{
			long value =
					((bytes[cursor] & 0xFFL) << 0) | ((bytes[cursor + 1] & 0xFFL) << 8) | ((bytes[cursor + 2] & 0xFFL) << 16)
							| ((bytes[cursor + 3] & 0xFFL) << 24);
			cursor += 4;
			return value;
		}
	}
public static class Writer
{
private final ByteArrayOutputStream s;
public Writer ()
{
s = new ByteArrayOutputStream ();
}
public Writer (ByteArrayOutputStream s)
{
this.s = s;
}
public void writeByte (int n)
{
s.write (n);
}
public void writeBytes (byte[] b)
{
try
{
s.write (b);
}
catch ( IOException e )
{
}
}
public void writeData (byte[] data)
{
if ( data.length <= 75 )
{
writeByte (data.length);
writeBytes (data);
}
else if ( data.length <= 0xff )
{
writeByte (Opcode.OP_PUSHDATA1.o);
writeByte (data.length);
writeBytes (data);
}
else if ( data.length <= 0xffff )
{
writeByte (Opcode.OP_PUSHDATA2.o);
writeInt16 (data.length);
writeBytes (data);
}
else if ( data.length <= 0x7fffffff )
{
writeByte (Opcode.OP_PUSHDATA4.o);
writeInt16 (data.length);
writeBytes (data);
}
}
public void writeToken (Token token)
{
s.write (token.op.o);
if ( token.data != null )
{
try
{
s.write (token.data);
}
catch ( IOException e )
{
}
}
}
public void writeInt16 (long n)
{
s.write ((int) (0xFFL & n));
s.write ((int) (0xFFL & (n >> 8)));
}
public void writeInt32 (long n)
{
s.write ((int) (0xFF & n));
s.write ((int) (0xFF & (n >> 8)));
s.write ((int) (0xFF & (n >> 16)));
s.write ((int) (0xFF & (n >> 24)));
}
public byte[] toByteArray ()
{
return s.toByteArray ();
}
}
	/**
	 * Iterates over the tokens of a serialized script, decoding push
	 * payloads (direct pushes and OP_PUSHDATA1/2/4) along the way.
	 */
	public static class Tokenizer
	{
		private final Reader reader;
		public Tokenizer (byte[] script)
		{
			reader = new Reader (script);
		}
		public boolean hashMoreElements ()
		{
			return !reader.eof ();
		}
		// current byte offset into the script
		public int getCursor ()
		{
			return reader.cursor;
		}
		@SuppressWarnings ("incomplete-switch")
		public Token nextToken () throws ValidationException
		{
			Token token = new Token ();
			int ix = reader.readByte ();
			// 185 (OP_NOP10) is the highest opcode defined in the enum.
			if ( ix > 185 )
			{
				throw new ValidationException ("Invalid script" + ix + " opcode at " + reader.cursor);
			}
			// The Opcode enum is declared in ascending numeric order, so
			// the ordinal equals the opcode's byte value.
			Opcode op = Opcode.values ()[ix];
			token.op = op;
			if ( op.o <= 75 )
			{
				// Opcodes 1..75 push that many literal bytes
				// (OP_FALSE pushes an empty array).
				token.data = reader.readBytes (op.o);
				return token;
			}
			switch ( op )
			{
				case OP_PUSHDATA1:
				{
					token.data = reader.readBytes (reader.readByte ());
					break;
				}
				case OP_PUSHDATA2:
				{
					token.data = reader.readBytes ((int) reader.readInt16 ());
					break;
				}
				case OP_PUSHDATA4:
				{
					token.data = reader.readBytes ((int) reader.readInt32 ());
					break;
				}
			}
			return token;
		}
	}
public static class Number
{
byte[] w;
public Number (byte[] b)
{
w = new byte[b.length];
System.arraycopy (b, 0, w, 0, b.length);
}
public Number (long n) throws ValidationException
{
if ( n == 0 )
{
w = new byte[0];
return;
}
boolean negative = false;
if ( n < 0 )
{
negative = true;
n = -n;
}
if ( n <= 0x7f )
{
w = new byte[] { (byte) (n & 0xff) };
w[0] |= negative ? 0x80 : 0;
return;
}
if ( n <= 0x7fff )
{
w = new byte[] { (byte) (n & 0xff), (byte) ((n >> 8) & 0xff) };
w[1] |= negative ? 0x80 : 0;
return;
}
if ( n <= 0x7fffff )
{
w = new byte[] { (byte) (n & 0xff), (byte) ((n >> 8) & 0xff), (byte) ((n >> 16) & 0xff) };
w[2] |= negative ? 0x80 : 0;
return;
}
w = new byte[] { (byte) (n & 0xff), (byte) ((n >> 8) & 0xff), (byte) ((n >> 16) & 0xff), (byte) ((n >> 24) & 0xff) };
if ( ((n >> 24) & 0x80) != 0 )
{
byte[] tmp = new byte[5];
System.arraycopy (w, 0, tmp, 0, 4);
w = tmp;
}
w[w.length - 1] |= negative ? 0x80 : 0;
}
public byte[] toByteArray ()
{
byte[] tmp = new byte[w.length];
System.arraycopy (w, 0, tmp, 0, w.length);
return tmp;
}
public long intValue () throws ValidationException
{
if ( w.length == 0 )
{
return 0;
}
boolean negative = false;
if ( (w[w.length - 1] & 0x80) != 0 )
{
negative = true;
w[w.length - 1] &= 0x7f;
}
int n = 0;
if ( w.length > 0 )
{
n += w[0] & 0xff;
}
if ( w.length > 1 )
{
n += (w[1] & 0xff) << 8;
}
if ( w.length > 2 )
{
n += (w[2] & 0xff) << 16;
}
if ( w.length > 3 )
{
n += (w[3] & 0xff) << 24;
}
if ( negative )
{
n = -n;
}
return n;
}
}
	/** Decode a little-endian sign-and-magnitude script number as an int. */
	public static int intValue (byte[] n) throws ValidationException
	{
		return (int) new ScriptFormat.Number (n).intValue ();
	}
	/** Tokenize a serialized script into its opcode/data elements. */
	public static List<ScriptFormat.Token> parse (byte[] script) throws ValidationException
	{
		List<ScriptFormat.Token> p = new ArrayList<ScriptFormat.Token> ();
		ScriptFormat.Tokenizer tokenizer = new ScriptFormat.Tokenizer (script);
		while ( tokenizer.hashMoreElements () )
		{
			p.add (tokenizer.nextToken ());
		}
		return p;
	}
	/**
	 * True if the script contains nothing but data-push operations.
	 * NOTE(review): only opcodes up to OP_PUSHDATA4 (0x4e) pass here;
	 * the reference client's IsPushOnly also admits OP_1NEGATE..OP_16 -
	 * confirm which semantics callers expect.
	 */
	public static boolean isPushOnly (byte[] script) throws ValidationException
	{
		for ( ScriptFormat.Token t : parse (script) )
		{
			if ( t.op.o > 78 )
			{
				return false;
			}
		}
		return true;
	}
	/**
	 * Count the signature operations in a script, as used for sigop
	 * limits. OP_CHECKSIG(VERIFY) counts one; OP_CHECKMULTISIG(VERIFY)
	 * counts the preceding small-integer constant if present, and a
	 * worst case of 20 otherwise.
	 * NOTE(review): {@code last} is only updated for tokens without push
	 * data, so a data push between OP_n and OP_CHECKMULTISIG leaves a
	 * stale value - confirm against the reference implementation.
	 */
	@SuppressWarnings ("incomplete-switch")
	public static int sigOpCount (byte[] script) throws ValidationException
	{
		int nsig = 0;
		ScriptFormat.Opcode last = ScriptFormat.Opcode.OP_FALSE;
		ScriptFormat.Tokenizer tokenizer = new ScriptFormat.Tokenizer (script);
		while ( tokenizer.hashMoreElements () )
		{
			ScriptFormat.Token token = tokenizer.nextToken ();
			if ( token.data == null )
			{
				switch ( token.op )
				{
					case OP_CHECKSIG:
					case OP_CHECKSIGVERIFY:
						++nsig;
						break;
					case OP_CHECKMULTISIG:
					case OP_CHECKMULTISIGVERIFY:
						// https://en.bitcoin.it/wiki/BIP_0016
						// OP_FALSE..OP_16 encode the key count directly.
						if ( last.o >= 0 && last.o <= 16 )
						{
							nsig += last.o;
						}
						else
						{
							nsig += 20;
						}
						break;
				}
				last = token.op;
			}
		}
		return nsig;
	}
	/**
	 * Assemble a script from its human readable form (the inverse of
	 * toReadable). Tokens are separated by single spaces and may be:
	 * 0x... raw hex copied verbatim, '...' an ASCII string pushed as data,
	 * a decimal number pushed in script-number form (1..16 use OP_1..OP_16),
	 * or an opcode name with or without the OP_ prefix.
	 * Unparseable numeric tokens are silently dropped.
	 */
	public static byte[] fromReadable (String s)
	{
		ScriptFormat.Writer writer = new ScriptFormat.Writer ();
		StringTokenizer tokenizer = new StringTokenizer (s, " ");
		while ( tokenizer.hasMoreElements () )
		{
			String token = tokenizer.nextToken ();
			ScriptFormat.Opcode op = ScriptFormat.Opcode.OP_FALSE;
			if ( token.startsWith ("0x") )
			{
				// Raw hex bytes, written into the script without a push prefix.
				byte[] data = ByteUtils.fromHex (token.substring (2));
				writer.writeBytes (data);
			}
			else if ( token.startsWith ("'") )
			{
				// Quoted ASCII string, pushed as data.
				String str = token.substring (1, token.length () - 1);
				try
				{
					writer.writeData (str.getBytes ("US-ASCII"));
				}
				catch ( UnsupportedEncodingException e )
				{
					// US-ASCII is guaranteed by the JVM; can not happen.
				}
			}
			// Numeric token: starts with a digit or minus, but is not one of the
			// opcode mnemonics that also begin with a digit (0NOTEQUAL, 2DUP, ...).
			else if ( (token.startsWith ("-") || token.startsWith ("0") || token.startsWith ("1") || token.startsWith ("2") || token.startsWith ("3")
					|| token.startsWith ("4") || token.startsWith ("5") || token.startsWith ("6") || token.startsWith ("7") || token.startsWith ("8") || token
					.startsWith ("9"))
					&& !token.equals ("0NOTEQUAL")
					&& !token.equals ("1NEGATE")
					&& !token.equals ("2DROP")
					&& !token.equals ("2DUP")
					&& !token.equals ("3DUP")
					&& !token.equals ("2OVER")
					&& !token.equals ("2ROT")
					&& !token.equals ("2SWAP")
					&& !token.equals ("1ADD")
					&& !token.equals ("1SUB") && !token.equals ("2MUL") && !token.equals ("2DIV") && !token.equals ("2SWAP") )
			{
				try
				{
					long n = Long.valueOf (token).longValue ();
					if ( n >= 1 && n <= 16 )
					{
						// Small numbers use the dedicated OP_1..OP_16 opcodes.
						writer.writeByte (Opcode.OP_1.o + (int) n - 1);
					}
					else
					{
						writer.writeData (new Number (n).toByteArray ());
					}
				}
				catch ( NumberFormatException e )
				{
					// Not actually a number; the token is silently dropped.
				}
				catch ( ValidationException e )
				{
					// Number could not be encoded; the token is silently dropped.
				}
			}
			else
			{
				// Opcode by name, with or without the OP_ prefix.
				if ( token.startsWith ("OP_") )
				{
					op = ScriptFormat.Opcode.valueOf (token);
				}
				else
				{
					op = ScriptFormat.Opcode.valueOf ("OP_" + token);
				}
				writer.writeByte (op.o);
			}
		}
		return writer.toByteArray ();
	}
public static String toReadable (byte[] script) throws ValidationException
{
List<ScriptFormat.Token> tokens = parse (script);
StringBuffer b = new StringBuffer ();
boolean first = true;
for ( ScriptFormat.Token token : tokens )
{
if ( !first )
{
b.append (" ");
}
first = false;
if ( token.data != null )
{
if ( token.data.length > 0 )
{
b.append ("0x" + ByteUtils.toHex (token.data));
}
else
{
b.append ("0x0");
}
}
else
{
b.append (token.op.toString ().substring (2));
}
}
return b.toString ();
}
public static boolean isPayToScriptHash (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
return parsed.size () == 3 && parsed.get (0).op == ScriptFormat.Opcode.OP_HASH160 && (parsed.get (1).data != null && parsed.get (1).op.o <= 75)
&& parsed.get (1).data.length == 20 && parsed.get (2).op == ScriptFormat.Opcode.OP_EQUAL;
}
catch ( ValidationException e )
{
return false;
}
}
public static boolean isPayToKey (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
return parsed.size () == 2 && parsed.get (0).data != null && parsed.get (1).op == ScriptFormat.Opcode.OP_CHECKSIG;
}
catch ( ValidationException e )
{
return false;
}
}
public static boolean isPayToAddress (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
return parsed.size () == 5 && parsed.get (0).op == ScriptFormat.Opcode.OP_DUP && parsed.get (1).op == ScriptFormat.Opcode.OP_HASH160
&& parsed.get (2).data != null && parsed.get (3).op == ScriptFormat.Opcode.OP_EQUALVERIFY
&& parsed.get (4).op == ScriptFormat.Opcode.OP_CHECKSIG;
}
catch ( ValidationException e )
{
return false;
}
}
public static boolean isMultiSig (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
int nkeys = -1;
int nvotes = -1;
for ( int i = 0; i < parsed.size (); ++i )
{
if ( parsed.get (i).op == ScriptFormat.Opcode.OP_CHECKMULTISIG || parsed.get (i).op == ScriptFormat.Opcode.OP_CHECKMULTISIGVERIFY )
{
nkeys = parsed.get (i - 1).op.ordinal () - ScriptFormat.Opcode.OP_1.ordinal () + 1;
nvotes = parsed.get (i - nkeys - 2).op.ordinal () - ScriptFormat.Opcode.OP_1.ordinal () + 1;
break;
}
}
if ( nkeys <= 0 || nkeys > 3 )
{
return false;
}
if ( parsed.size () != nkeys + 3 )
{
return false;
}
if ( nvotes < 0 || nvotes > nkeys )
{
return false;
}
}
catch ( ValidationException e )
{
return false;
}
return true;
}
public static boolean isStandard (byte[] script)
{
return isPayToAddress (script) || isPayToKey (script) || isPayToScriptHash (script) || isMultiSig (script);
}
public static byte[] getPayToAddressScript (byte[] keyHash)
{
ScriptFormat.Writer writer = new ScriptFormat.Writer ();
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_DUP));
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_HASH160));
writer.writeData (keyHash);
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_EQUALVERIFY));
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_CHECKSIG));
return writer.toByteArray ();
}
	/**
	 * Remove every push of the given signature from a script. Needed when
	 * computing the transaction hash a signature commits to, which must
	 * not include the signature itself.
	 *
	 * @param script the script to filter
	 * @param sig the exact signature bytes to remove
	 * @return the script with all matching signature pushes removed
	 * @throws ValidationException if the script can not be parsed
	 */
	public static byte[] deleteSignatureFromScript (byte[] script, byte[] sig) throws ValidationException
	{
		ScriptFormat.Tokenizer tokenizer = new ScriptFormat.Tokenizer (script);
		ScriptFormat.Writer writer = new ScriptFormat.Writer ();
		while ( tokenizer.hashMoreElements () )
		{
			ScriptFormat.Token token = tokenizer.nextToken ();
			// Only direct pushes (opcode <= 75) of exactly the signature's
			// length are candidates; everything else is copied through.
			if ( token.data != null && token.op.o <= 75 && token.data.length == sig.length )
			{
				boolean found = true;
				for ( int i = 0; i < sig.length; ++i )
				{
					if ( sig[i] != token.data[i] )
					{
						found = false;
						break;
					}
				}
				// Copy the push only when it is not the signature.
				if ( !found )
				{
					writer.writeToken (token);
				}
			}
			else
			{
				writer.writeToken (token);
			}
		}
		return writer.toByteArray ();
	}
}
| src/main/java/com/bitsofproof/supernode/api/ScriptFormat.java | package com.bitsofproof.supernode.api;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
public class ScriptFormat
{
// unfortunatelly unused: https://bitcointalk.org/index.php?topic=120836.0
public static final int SIGHASH_ALL = 1;
public static final int SIGHASH_NONE = 2;
public static final int SIGHASH_SINGLE = 3;
public static final int SIGHASH_ANYONECANPAY = 0x80;
public static enum Opcode
{
OP_FALSE (0), OP_PUSH1 (1), OP_PUSH2 (2), OP_PUSH3 (3), OP_PUSH4 (4), OP_PUSH5 (5), OP_PUSH6 (6), OP_PUSH7 (7), OP_PUSH8 (8), OP_PUSH9 (9), OP_PUSH10 (
10), OP_PUSH11 (11), OP_PUSH12 (12), OP_PUSH13 (13), OP_PUSH14 (14), OP_PUSH15 (15), OP_PUSH16 (16), OP_PUSH17 (17), OP_PUSH18 (18), OP_PUSH19 (
19), OP_PUSH20 (20), OP_PUSH21 (21), OP_PUSH22 (22), OP_PUSH23 (23), OP_PUSH24 (24), OP_PUSH25 (25), OP_PUSH26 (26), OP_PUSH27 (27), OP_PUSH28 (
28), OP_PUSH29 (29), OP_PUSH30 (30), OP_PUSH31 (31), OP_PUSH32 (32), OP_PUSH33 (33), OP_PUSH34 (34), OP_PUSH35 (35), OP_PUSH36 (36), OP_PUSH37 (
37), OP_PUSH38 (38), OP_PUSH39 (39), OP_PUSH40 (40), OP_PUSH41 (41), OP_PUSH42 (42), OP_PUSH43 (43), OP_PUSH44 (44), OP_PUSH45 (45), OP_PUSH46 (
46), OP_PUSH47 (47), OP_PUSH48 (48), OP_PUSH49 (49), OP_PUSH50 (50), OP_PUSH51 (51), OP_PUSH52 (52), OP_PUSH53 (53), OP_PUSH54 (54), OP_PUSH55 (
55), OP_PUSH56 (56), OP_PUSH57 (57), OP_PUSH58 (58), OP_PUSH59 (59), OP_PUSH60 (60), OP_PUSH61 (61), OP_PUSH62 (62), OP_PUSH63 (63), OP_PUSH64 (
64), OP_PUSH65 (65), OP_PUSH66 (66), OP_PUSH67 (67), OP_PUSH68 (68), OP_PUSH69 (69), OP_PUSH70 (70), OP_PUSH71 (71), OP_PUSH72 (72), OP_PUSH73 (
73), OP_PUSH74 (74), OP_PUSH75 (75),
OP_PUSHDATA1 (76), OP_PUSHDATA2 (77), OP_PUSHDATA4 (78), OP_1NEGATE (79),
OP_RESERVED (80),
OP_1 (81), OP_2 (82), OP_3 (83), OP_4 (84), OP_5 (85), OP_6 (86), OP_7 (87), OP_8 (88), OP_9 (89), OP_10 (90), OP_11 (91), OP_12 (92), OP_13 (93),
OP_14 (94), OP_15 (95), OP_16 (96),
OP_NOP (97), OP_VER (98), OP_IF (99), OP_NOTIF (100), OP_VERIF (101), OP_VERNOTIF (102),
OP_ELSE (103), OP_ENDIF (104), OP_VERIFY (105), OP_RETURN (106),
OP_TOALTSTACK (107), OP_FROMALTSTACK (108), OP_2DROP (109), OP_2DUP (110), OP_3DUP (111), OP_2OVER (112), OP_2ROT (113), OP_2SWAP (114),
OP_IFDUP (115), OP_DEPTH (116), OP_DROP (117), OP_DUP (118), OP_NIP (119), OP_OVER (120), OP_PICK (121), OP_ROLL (122), OP_ROT (123), OP_SWAP (124),
OP_TUCK (125),
OP_CAT (126), OP_SUBSTR (127), OP_LEFT (128), OP_RIGHT (129), OP_SIZE (130), OP_INVERT (131), OP_AND (132), OP_OR (133), OP_XOR (134),
OP_EQUAL (135), OP_EQUALVERIFY (136),
OP_RESERVED1 (137), OP_RESERVED2 (138),
OP_1ADD (139), // 0x8b in out 1 is added to the input.
OP_1SUB (140), // 0x8c in out 1 is subtracted from the input.
OP_2MUL (141), // 0x8d in out The input is multiplied by 2. Currently
// disabled.
OP_2DIV (142), // 0x8e in out The input is divided by 2. Currently
// disabled.
OP_NEGATE (143), // 0x8f in out The sign of the input is flipped.
OP_ABS (144), // 0x90 in out The input is made positive.
OP_NOT (145), // 0x91 in out If the input is 0 or 1, it is flipped.
// Otherwise the output will be 0.
OP_0NOTEQUAL (146), // 0x92 in out Returns 0 if the input is 0. 1
// otherwise.
OP_ADD (147), // 0x93 a b out a is added to b.
OP_SUB (148), // 0x94 a b out b is subtracted from a.
OP_MUL (149), // 0x95 a b out a is multiplied by b. Currently disabled.
OP_DIV (150), // 0x96 a b out a is divided by b. Currently disabled.
OP_MOD (151), // 0x97 a b out Returns the remainder after dividing a by
// b. Currently disabled.
OP_LSHIFT (152), // 0x98 a b out Shifts a left b bits, preserving sign.
// Currently disabled.
OP_RSHIFT (153), // 0x99 a b out Shifts a right b bits, preserving sign.
// Currently disabled.
OP_BOOLAND (154), // 0x9a a b out If both a and b are not 0, the output
// is 1. Otherwise 0.
OP_BOOLOR (155), // 0x9b a b out If a or b is not 0, the output is 1.
// Otherwise 0.
OP_NUMEQUAL (156), // 0x9c a b out Returns 1 if the numbers are equal, 0
// otherwise.
OP_NUMEQUALVERIFY (157), // 0x9d a b out Same as OP_NUMEQUAL, but runs
// OP_VERIFY afterward.
OP_NUMNOTEQUAL (158), // 0x9e a b out Returns 1 if the numbers are not
// equal, 0 otherwise.
OP_LESSTHAN (159), // 0x9f a b out Returns 1 if a is less than b, 0
// otherwise.
OP_GREATERTHAN (160), // 0xa0 a b out Returns 1 if a is greater than b,
// 0
// otherwise.
OP_LESSTHANOREQUAL (161), // 0xa1 a b out Returns 1 if a is less than or
// equal to b, 0 otherwise.
OP_GREATERTHANOREQUAL (162), // 0xa2 a b out Returns 1 if a is greater
// than or equal to b, 0 otherwise.
OP_MIN (163), // 0xa3 a b out Returns the smaller of a and b.
OP_MAX (164), // 0xa4 a b out Returns the larger of a and b.
OP_WITHIN (165), // 0xa5 x min max out Returns 1 if x is within the
// specified range (left-inclusive), 0 otherwise.
OP_RIPEMD160 (166), // 0xa6 in hash The input is hashed using
// RIPEMD-160.
OP_SHA1 (167), // 0xa7 in hash The input is hashed using SHA-1.
OP_SHA256 (168), // 0xa8 in hash The input is hashed using SHA-256.
OP_HASH160 (169), // 0xa9 in hash The input is hashed twice: first with
// SHA-256 and then with RIPEMD-160.
OP_HASH256 (170), // 0xaa in hash The input is hashed two times with
// SHA-256.
OP_CODESEPARATOR (171), // 0xab Nothing Nothing All of the signature
// checking words will only match signatures to
// the data after the most recently-executed
// OP_CODESEPARATOR.
OP_CHECKSIG (172), // 0xac sig pubkey True / false The entire
// transaction's outputs, inputs, and script (from
// the most recently-executed OP_CODESEPARATOR to
// the end) are hashed. The signature used by
// OP_CHECKSIG must be a valid signature for this
// hash and public key. If it is, 1 is returned, 0
// otherwise.
OP_CHECKSIGVERIFY (173), // 0xad sig pubkey True / false Same as
// OP_CHECKSIG, but OP_VERIFY is executed
// afterward.
OP_CHECKMULTISIG (174), // 0xae x sig1 sig2 ... <number of signatures>
// pub1 pub2 <number of public keys> True /
// False For each signature and public key pair,
// OP_CHECKSIG is executed. If more public keys
// than signatures are listed, some key/sig
// pairs can fail. All signatures need to match
// a public key. If all signatures are valid, 1
// is returned, 0 otherwise. Due to a bug, one
// extra unused value is removed from the stack.
OP_CHECKMULTISIGVERIFY (175), // 0xaf x sig1 sig2 ... <number of
// signatures> pub1 pub2 ... <number of
// public keys> True / False Same as
// OP_CHECKMULTISIG, but OP_VERIFY is
// executed afterward.
OP_NOP1 (176), OP_NOP2 (177), OP_NOP3 (178), OP_NOP4 (179), OP_NOP5 (180), OP_NOP6 (181), OP_NOP7 (182), OP_NOP8 (183), OP_NOP9 (184), OP_NOP10 (185);
final int o;
Opcode (int n)
{
this.o = n;
}
}
public static class Token
{
public Opcode op;
public byte[] data;
public Token ()
{
}
public Token (Opcode op)
{
this.op = op;
data = null;
}
}
public static class Reader
{
private final byte[] bytes;
int cursor;
public Reader (byte[] script)
{
this.bytes = script;
this.cursor = 0;
}
public boolean eof ()
{
return cursor == bytes.length;
}
public byte[] readBytes (int n)
{
byte[] b = new byte[n];
System.arraycopy (bytes, cursor, b, 0, n);
cursor += n;
return b;
}
public void skipBytes (int n)
{
cursor += n;
}
public int readByte ()
{
return bytes[cursor++] & 0xff;
}
public long readInt16 ()
{
long value = ((bytes[cursor] & 0xFFL) << 0) | ((bytes[cursor + 1] & 0xFFL) << 8);
cursor += 2;
return value;
}
public long readInt32 ()
{
long value =
((bytes[cursor] & 0xFFL) << 0) | ((bytes[cursor + 1] & 0xFFL) << 8) | ((bytes[cursor + 2] & 0xFFL) << 16)
| ((bytes[cursor + 3] & 0xFFL) << 24);
cursor += 4;
return value;
}
}
public static class Writer
{
private final ByteArrayOutputStream s;
public Writer ()
{
s = new ByteArrayOutputStream ();
}
public Writer (ByteArrayOutputStream s)
{
this.s = s;
}
public void writeByte (int n)
{
s.write (n);
}
public void writeBytes (byte[] b)
{
try
{
s.write (b);
}
catch ( IOException e )
{
}
}
public void writeData (byte[] data)
{
if ( data.length <= 75 )
{
writeByte (data.length);
writeBytes (data);
}
else if ( data.length <= 0xff )
{
writeByte (Opcode.OP_PUSHDATA1.o);
writeByte (data.length);
writeBytes (data);
}
else if ( data.length <= 0xffff )
{
writeByte (Opcode.OP_PUSHDATA2.o);
writeInt16 (data.length);
writeBytes (data);
}
else if ( data.length <= 0x7fffffff )
{
writeByte (Opcode.OP_PUSHDATA4.o);
writeInt16 (data.length);
writeBytes (data);
}
}
public void writeToken (Token token)
{
s.write (token.op.o);
if ( token.data != null )
{
try
{
s.write (token.data);
}
catch ( IOException e )
{
}
}
}
public void writeInt16 (long n)
{
s.write ((int) (0xFFL & n));
s.write ((int) (0xFFL & (n >> 8)));
}
public void writeInt32 (long n)
{
s.write ((int) (0xFF & n));
s.write ((int) (0xFF & (n >> 8)));
s.write ((int) (0xFF & (n >> 16)));
s.write ((int) (0xFF & (n >> 24)));
}
public byte[] toByteArray ()
{
return s.toByteArray ();
}
}
public static class Tokenizer
{
private final Reader reader;
public Tokenizer (byte[] script)
{
reader = new Reader (script);
}
public boolean hashMoreElements ()
{
return !reader.eof ();
}
public int getCursor ()
{
return reader.cursor;
}
@SuppressWarnings ("incomplete-switch")
public Token nextToken () throws ValidationException
{
Token token = new Token ();
int ix = reader.readByte ();
if ( ix > 185 )
{
throw new ValidationException ("Invalid script" + ix + " opcode at " + reader.cursor);
}
Opcode op = Opcode.values ()[ix];
token.op = op;
if ( op.o <= 75 )
{
token.data = reader.readBytes (op.o);
return token;
}
switch ( op )
{
case OP_PUSHDATA1:
{
token.data = reader.readBytes (reader.readByte ());
break;
}
case OP_PUSHDATA2:
{
token.data = reader.readBytes ((int) reader.readInt16 ());
break;
}
case OP_PUSHDATA4:
{
token.data = reader.readBytes ((int) reader.readInt32 ());
break;
}
}
return token;
}
}
public static class Number
{
byte[] w;
public Number (byte[] b)
{
w = new byte[b.length];
System.arraycopy (b, 0, w, 0, b.length);
}
public Number (long n) throws ValidationException
{
if ( n == 0 )
{
w = new byte[0];
return;
}
boolean negative = false;
if ( n < 0 )
{
negative = true;
n = -n;
}
if ( n <= 0x7f )
{
w = new byte[] { (byte) (n & 0xff) };
w[0] |= negative ? 0x80 : 0;
return;
}
if ( n <= 0x7fff )
{
w = new byte[] { (byte) (n & 0xff), (byte) ((n >> 8) & 0xff) };
w[1] |= negative ? 0x80 : 0;
return;
}
if ( n <= 0x7fffff )
{
w = new byte[] { (byte) (n & 0xff), (byte) ((n >> 8) & 0xff), (byte) ((n >> 16) & 0xff) };
w[2] |= negative ? 0x80 : 0;
return;
}
w = new byte[] { (byte) (n & 0xff), (byte) ((n >> 8) & 0xff), (byte) ((n >> 16) & 0xff), (byte) ((n >> 24) & 0xff) };
if ( ((n >> 24) & 0x80) != 0 )
{
byte[] tmp = new byte[5];
System.arraycopy (w, 0, tmp, 0, 4);
w = tmp;
}
w[w.length - 1] |= negative ? 0x80 : 0;
}
public byte[] toByteArray ()
{
byte[] tmp = new byte[w.length];
System.arraycopy (w, 0, tmp, 0, w.length);
return tmp;
}
public long intValue () throws ValidationException
{
if ( w.length == 0 )
{
return 0;
}
boolean negative = false;
if ( (w[w.length - 1] & 0x80) != 0 )
{
negative = true;
w[w.length - 1] &= 0x7f;
}
int n = 0;
if ( w.length > 0 )
{
n += w[0] & 0xff;
}
if ( w.length > 1 )
{
n += (w[1] & 0xff) << 8;
}
if ( w.length > 2 )
{
n += (w[2] & 0xff) << 16;
}
if ( w.length > 3 )
{
n += (w[3] & 0xff) << 24;
}
if ( negative )
{
n = -n;
}
return n;
}
}
public static int intValue (byte[] n) throws ValidationException
{
return (int) new ScriptFormat.Number (n).intValue ();
}
public static List<ScriptFormat.Token> parse (byte[] script) throws ValidationException
{
List<ScriptFormat.Token> p = new ArrayList<ScriptFormat.Token> ();
ScriptFormat.Tokenizer tokenizer = new ScriptFormat.Tokenizer (script);
while ( tokenizer.hashMoreElements () )
{
p.add (tokenizer.nextToken ());
}
return p;
}
public static boolean isPushOnly (byte[] script) throws ValidationException
{
for ( ScriptFormat.Token t : parse (script) )
{
if ( t.op.o > 78 )
{
return false;
}
}
return true;
}
@SuppressWarnings ("incomplete-switch")
public static int sigOpCount (byte[] script) throws ValidationException
{
int nsig = 0;
ScriptFormat.Opcode last = ScriptFormat.Opcode.OP_FALSE;
ScriptFormat.Tokenizer tokenizer = new ScriptFormat.Tokenizer (script);
while ( tokenizer.hashMoreElements () )
{
ScriptFormat.Token token = tokenizer.nextToken ();
if ( token.data == null )
{
switch ( token.op )
{
case OP_CHECKSIG:
case OP_CHECKSIGVERIFY:
++nsig;
break;
case OP_CHECKMULTISIG:
case OP_CHECKMULTISIGVERIFY:
// https://en.bitcoin.it/wiki/BIP_0016
if ( last.o >= 0 && last.o <= 16 )
{
nsig += last.o;
}
else
{
nsig += 20;
}
break;
}
last = token.op;
}
}
return nsig;
}
public static byte[] fromReadable (String s)
{
ScriptFormat.Writer writer = new ScriptFormat.Writer ();
StringTokenizer tokenizer = new StringTokenizer (s, " ");
while ( tokenizer.hasMoreElements () )
{
String token = tokenizer.nextToken ();
ScriptFormat.Opcode op = ScriptFormat.Opcode.OP_FALSE;
if ( token.startsWith ("0x") )
{
byte[] data = ByteUtils.fromHex (token.substring (2));
writer.writeBytes (data);
}
else if ( token.startsWith ("'") )
{
String str = token.substring (1, token.length () - 1);
try
{
writer.writeData (str.getBytes ("US-ASCII"));
}
catch ( UnsupportedEncodingException e )
{
}
}
else if ( (token.startsWith ("-") || token.startsWith ("0") || token.startsWith ("1") || token.startsWith ("2") || token.startsWith ("3")
|| token.startsWith ("4") || token.startsWith ("5") || token.startsWith ("6") || token.startsWith ("7") || token.startsWith ("8") || token
.startsWith ("9"))
&& !token.equals ("0NOTEQUAL")
&& !token.equals ("1NEGATE")
&& !token.equals ("2DROP")
&& !token.equals ("2DUP")
&& !token.equals ("3DUP")
&& !token.equals ("2OVER")
&& !token.equals ("2ROT")
&& !token.equals ("2SWAP")
&& !token.equals ("1ADD")
&& !token.equals ("1SUB") && !token.equals ("2MUL") && !token.equals ("2DIV") && !token.equals ("2SWAP") )
{
try
{
long n = Long.valueOf (token).longValue ();
if ( n >= 1 && n <= 16 )
{
writer.writeByte (Opcode.OP_1.o + (int) n - 1);
}
else
{
writer.writeData (new Number (n).toByteArray ());
}
}
catch ( NumberFormatException e )
{
}
catch ( ValidationException e )
{
}
}
else
{
if ( token.startsWith ("OP_") )
{
op = ScriptFormat.Opcode.valueOf (token);
}
else
{
op = ScriptFormat.Opcode.valueOf ("OP_" + token);
}
writer.writeByte (op.o);
}
}
return writer.toByteArray ();
}
public static String toReadable (byte[] script) throws ValidationException
{
List<ScriptFormat.Token> tokens = parse (script);
StringBuffer b = new StringBuffer ();
boolean first = true;
for ( ScriptFormat.Token token : tokens )
{
if ( !first )
{
b.append (" ");
}
first = false;
if ( token.data != null )
{
if ( token.data.length > 0 )
{
b.append ("0x" + ByteUtils.toHex (token.data));
}
else
{
b.append ("0x0");
}
}
else
{
b.append (token.op.toString ().substring (2));
}
}
return b.toString ();
}
public static boolean isPayToScriptHash (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
return parsed.size () == 3 && parsed.get (0).op == ScriptFormat.Opcode.OP_HASH160 && (parsed.get (1).data != null && parsed.get (1).op.o <= 75)
&& parsed.get (1).data.length == 20 && parsed.get (2).op == ScriptFormat.Opcode.OP_EQUAL;
}
catch ( ValidationException e )
{
return false;
}
}
public static boolean isPayToKey (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
return parsed.size () == 2 && parsed.get (0).data != null && parsed.get (1).op == ScriptFormat.Opcode.OP_CHECKSIG;
}
catch ( ValidationException e )
{
return false;
}
}
public static boolean isPayToAddress (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
return parsed.size () == 5 && parsed.get (0).op == ScriptFormat.Opcode.OP_DUP && parsed.get (1).op == ScriptFormat.Opcode.OP_HASH160
&& parsed.get (2).data != null && parsed.get (3).op == ScriptFormat.Opcode.OP_EQUALVERIFY
&& parsed.get (4).op == ScriptFormat.Opcode.OP_CHECKSIG;
}
catch ( ValidationException e )
{
return false;
}
}
public static boolean isMultiSig (byte[] script)
{
try
{
List<ScriptFormat.Token> parsed = parse (script);
int nkeys = -1;
int nvotes = -1;
for ( int i = 0; i < parsed.size (); ++i )
{
if ( parsed.get (i).op == ScriptFormat.Opcode.OP_CHECKMULTISIG || parsed.get (i).op == ScriptFormat.Opcode.OP_CHECKMULTISIGVERIFY )
{
nkeys = parsed.get (i - 1).op.ordinal () - ScriptFormat.Opcode.OP_1.ordinal () + 1;
nvotes = parsed.get (i - nkeys - 2).op.ordinal () - ScriptFormat.Opcode.OP_1.ordinal () + 1;
break;
}
}
if ( nkeys <= 0 || nkeys > 3 )
{
return false;
}
if ( parsed.size () != nkeys + 3 )
{
return false;
}
if ( nvotes < 0 || nvotes > nkeys )
{
return false;
}
}
catch ( ValidationException e )
{
return false;
}
return true;
}
public static boolean isStandard (byte[] script)
{
return isPayToAddress (script) || isPayToKey (script) || isPayToScriptHash (script) || isMultiSig (script);
}
public static byte[] getPayToAddressScript (byte[] keyHash)
{
ScriptFormat.Writer writer = new ScriptFormat.Writer ();
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_DUP));
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_HASH160));
writer.writeData (keyHash);
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_EQUALVERIFY));
writer.writeToken (new ScriptFormat.Token (ScriptFormat.Opcode.OP_CHECKSIG));
return writer.toByteArray ();
}
public static byte[] deleteSignatureFromScript (byte[] script, byte[] sig) throws ValidationException
{
ScriptFormat.Tokenizer tokenizer = new ScriptFormat.Tokenizer (script);
ScriptFormat.Writer writer = new ScriptFormat.Writer ();
while ( tokenizer.hashMoreElements () )
{
ScriptFormat.Token token = tokenizer.nextToken ();
if ( token.data != null && token.data.length == sig.length )
{
boolean found = true;
for ( int i = 0; i < sig.length; ++i )
{
if ( sig[i] != token.data[i] )
{
found = false;
break;
}
}
if ( !found )
{
writer.writeToken (token);
}
}
else
{
writer.writeToken (token);
}
}
return writer.toByteArray ();
}
}
| bitcoind tests | src/main/java/com/bitsofproof/supernode/api/ScriptFormat.java | bitcoind tests |
|
Java | apache-2.0 | 0f7378d567bd5b1d6874adf8e8d1b0887588bd6a | 0 | bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud,bozimmerman/CoffeeMud | package com.planet_ink.coffee_mud.Abilities.Archon;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2004-2017 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Archon_Metacraft extends ArchonSkill
{
	@Override
	public String ID()
	{
		return "Archon_Metacraft";
	}

	// Localized display name, resolved once when the class loads.
	private final static String localizedName = CMLib.lang().L("Metacrafting");

	@Override
	public String name()
	{
		return localizedName;
	}

	// Command words players type to invoke this archon skill.
	private static final String[] triggerStrings = I(new String[] { "METACRAFT" });

	@Override
	public String[] triggerStrings()
	{
		return triggerStrings;
	}

	// Lazily populated, lowest-qualifying-level-ordered cache of every
	// ItemCraftor skill; filled on first invoke() and shared by all instances.
	public static List<Ability> craftingSkills = new Vector<Ability>();
protected String replacePercent(String thisStr, String withThis)
{
if(withThis.length()==0)
{
int x=thisStr.indexOf("% ");
if(x>=0)
return new StringBuffer(thisStr).replace(x,x+2,withThis).toString();
x=thisStr.indexOf(" %");
if(x>=0)
return new StringBuffer(thisStr).replace(x,x+2,withThis).toString();
x=thisStr.indexOf('%');
if(x>=0)
return new StringBuffer(thisStr).replace(x,x+1,withThis).toString();
}
else
{
final int x=thisStr.indexOf('%');
if(x>=0)
return new StringBuffer(thisStr).replace(x,x+1,withThis).toString();
}
return thisStr;
}
@Override
public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
{
if(craftingSkills.size()==0)
{
final Vector<Ability> V=new Vector<Ability>();
for(final Enumeration<Ability> e=CMClass.abilities();e.hasMoreElements();)
{
final Ability A=e.nextElement();
if(A instanceof ItemCraftor)
V.addElement((Ability)A.copyOf());
}
while(V.size()>0)
{
int lowest=Integer.MAX_VALUE;
Ability lowestA=null;
for(int i=0;i<V.size();i++)
{
final Ability A=V.elementAt(i);
final int ii=CMLib.ableMapper().lowestQualifyingLevel(A.ID());
if(ii<lowest)
{
lowest=ii;
lowestA=A;
}
}
if(lowestA==null)
lowestA=V.firstElement();
if(lowestA!=null)
{
V.removeElement(lowestA);
craftingSkills.add(lowestA);
}
else
break;
}
}
if(commands.size()<1)
{
mob.tell(L("Metacraft what ([recipe], everything, every [recipe], all [skill name]), (optionally) out of what material, and (optionally) to self, to here, or to file [FILENAME]?"));
return false;
}
String mat=null;
String toWHERE = "SELF";
if(commands.size()>1)
{
for(int x=1;x<commands.size()-1;x++)
{
if(commands.get(x).equalsIgnoreCase("to"))
{
if(commands.get(x+1).equalsIgnoreCase("self"))
{
toWHERE="SELF";
commands.remove(x);
commands.remove(x);
break;
}
if(commands.get(x+1).equalsIgnoreCase("here"))
{
toWHERE="HERE";
commands.remove(x);
commands.remove(x);
break;
}
if(commands.get(x+1).equalsIgnoreCase("file")&&(x<commands.size()-2))
{
toWHERE=commands.get(x+2);
commands.remove(x);
commands.remove(x);
commands.remove(x);
break;
}
}
}
if(commands.size()>1)
{
mat=(commands.get(commands.size()-1)).toUpperCase();
commands.remove(commands.size()-1);
}
}
int material=-1;
List<Ability> craftingSkills=Archon_Metacraft.craftingSkills;
if(mat!=null)
material=RawMaterial.CODES.FIND_StartsWith(mat);
if((mat!=null)&&(material<0))
{
mob.tell(L("'@x1' is not a recognized material.",mat));
return false;
}
else
if(material>0)
{
craftingSkills = new ArrayList<Ability>();
for(Ability A : Archon_Metacraft.craftingSkills)
{
ItemCraftor craft = (ItemCraftor)A;
if(craft.myResources().contains(Integer.valueOf(material)))
craftingSkills.add(A);
}
if(craftingSkills.size()==0)
craftingSkills=Archon_Metacraft.craftingSkills;
}
ItemCraftor skill=null;
String recipe=CMParms.combine(commands,0);
List<Pair<Ability,String>> skillsToUse=new Vector<Pair<Ability,String>>();
boolean everyFlag=false;
if(recipe.equalsIgnoreCase("everything"))
{
skillsToUse=new XVector<Pair<Ability,String>>();
for(Ability A : craftingSkills)
skillsToUse.add(new Pair<Ability,String>(A,"*"));
everyFlag=true;
recipe=null;
}
else
if(recipe.toUpperCase().startsWith("EVERY "))
{
everyFlag=true;
recipe=recipe.substring(6).trim();
for(int i=0;i<craftingSkills.size();i++)
{
skill=(ItemCraftor)craftingSkills.get(i);
final List<List<String>> V=skill.matchingRecipeNames(recipe,false);
if((V!=null)&&(V.size()>0))
{
for(List<String> V2 : V)
skillsToUse.add(new Pair<Ability,String>(skill,replacePercent(V2.get(0),"")));
}
}
if(skillsToUse.size()==0)
{
for(int i=0;i<craftingSkills.size();i++)
{
skill=(ItemCraftor)craftingSkills.get(i);
final List<List<String>> V=skill.matchingRecipeNames(recipe,true);
if((V!=null)&&(V.size()>0))
{
for(List<String> V2 : V)
skillsToUse.add(new Pair<Ability,String>(skill,replacePercent(V2.get(0),"")));
}
}
}
}
else
if(recipe.toUpperCase().startsWith("ALL "))
{
everyFlag=true;
String skillName=recipe.toUpperCase().substring(4);
skill = (ItemCraftor)CMLib.english().fetchEnvironmental(craftingSkills, skillName, true);
if(skill == null)
skill = (ItemCraftor)CMLib.english().fetchEnvironmental(craftingSkills, skillName, false);
if(skill == null)
{
mob.tell(L("'@x1' is not a known crafting skill.",recipe));
return false;
}
skillsToUse=new XVector<Pair<Ability,String>>();
skillsToUse.add(new Pair<Ability,String>(skill,"*"));
recipe=null;
}
else
{
for(int i=0;i<craftingSkills.size();i++)
{
skill=(ItemCraftor)craftingSkills.get(i);
final List<List<String>> V=skill.matchingRecipeNames(recipe,false);
if((V!=null)&&(V.size()>0))
skillsToUse.add(new Pair<Ability,String>(skill,this.replacePercent(V.get(0).get(0), "")));
}
if(skillsToUse.size()==0)
{
for(int i=0;i<craftingSkills.size();i++)
{
skill=(ItemCraftor)craftingSkills.get(i);
final List<List<String>> V=skill.matchingRecipeNames(recipe,true);
if((V!=null)&&(V.size()>0))
skillsToUse.add(new Pair<Ability,String>(skill,this.replacePercent(V.get(0).get(0), "")));
}
}
}
if(skillsToUse.size()==0)
{
mob.tell(L("'@x1' can not be made with any of the known crafting skills.",recipe));
return false;
}
boolean success=false;
final StringBuffer xml = new StringBuffer("<ITEMS>");
final HashSet<String> files = new HashSet<String>();
for(int s=0;s<skillsToUse.size();s++)
{
skill=(ItemCraftor)skillsToUse.get(s).first;
final String recipeName = skillsToUse.get(s).second;
final List<Item> items=new Vector<Item>();
if(everyFlag)
{
if(recipe==null)
{
List<ItemCraftor.ItemKeyPair> V=null;
if(material>=0)
V=skill.craftAllItemSets(material,false);
else
V=skill.craftAllItemSets(false);
if(V!=null)
{
for(final ItemCraftor.ItemKeyPair L: V)
{
items.add(L.item);
if(L.key!=null)
items.add(L.key);
}
}
}
else
if(material>=0)
{
final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName,material,false);
if(pair!=null)
items.addAll(pair.asList());
}
else
{
final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName);
if(pair!=null)
items.addAll(pair.asList());
}
}
else
if(material>=0)
{
final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName,material,false);
if(pair!=null)
items.addAll(pair.asList());
}
else
{
final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName);
if(pair!=null)
items.addAll(pair.asList());
}
if(items.size()==0)
continue;
success=true;
if(toWHERE.equals("SELF")||toWHERE.equals("HERE"))
{
for(final Item building : items)
{
if(building instanceof ClanItem)
{
final Pair<Clan,Integer> p=CMLib.clans().findPrivilegedClan(mob, Clan.Function.ENCHANT);
if(p!=null)
{
final Clan C=p.first;
final String clanName=(" "+C.getGovernmentName()+" "+C.name());
building.setName(CMStrings.replaceFirst(building.Name(), " Clan None", clanName));
building.setDisplayText(CMStrings.replaceFirst(building.displayText(), " Clan None", clanName));
building.setDescription(CMStrings.replaceFirst(building.description(), " Clan None", clanName));
((ClanItem)building).setClanID(C.clanID());
}
}
if(toWHERE.equals("HERE"))
{
mob.location().addItem(building,ItemPossessor.Expire.Player_Drop);
mob.location().show(mob,null,null,CMMsg.MSG_OK_ACTION,L("@x1 appears here.",building.name()));
}
else
{
mob.moveItemTo(building);
mob.location().show(mob,null,null,CMMsg.MSG_OK_ACTION,L("@x1 appears in <S-YOUPOSS> hands.",building.name()));
}
}
}
else
xml.append(CMLib.coffeeMaker().getItemsXML(items,new Hashtable<String,List<Item>>(),files,null));
mob.location().recoverPhyStats();
if(!everyFlag)
break;
}
if(success
&&(!toWHERE.equals("SELF"))
&&(!toWHERE.equals("HERE")))
{
final CMFile file = new CMFile(toWHERE,mob);
if(!file.canWrite())
mob.tell(L("Unable to open file '@x1' for writing.",toWHERE));
else
{
xml.append("</ITEMS>");
if(files.size()>0)
{
final StringBuffer str=new StringBuffer("<FILES>");
for(final Iterator<String> i=files.iterator();i.hasNext();)
{
final String filename=i.next();
final StringBuffer buf=new CMFile(Resources.makeFileResourceName(filename),null,CMFile.FLAG_LOGERRORS).text();
if((buf!=null)&&(buf.length()>0))
{
str.append("<FILE NAME=\""+filename+"\">");
str.append(buf);
str.append("</FILE>");
}
}
str.append("</FILES>");
xml.append(str);
}
file.saveText(xml);
mob.tell(L("File '@x1' written.",file.getAbsolutePath()));
}
}
if(!success)
{
mob.tell(L("The metacraft failed."));
return false;
}
return true;
}
}
| com/planet_ink/coffee_mud/Abilities/Archon/Archon_Metacraft.java | package com.planet_ink.coffee_mud.Abilities.Archon;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2004-2017 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Archon_Metacraft extends ArchonSkill
{
/** @return this ability's unique class identifier, "Archon_Metacraft". */
@Override
public String ID() {
    return "Archon_Metacraft";
}
/** Localized display name, resolved once at class-load time. */
private static final String localizedName = CMLib.lang().L("Metacrafting");

/** @return the localized, player-visible name of this skill. */
@Override
public String name() {
    return localizedName;
}
/** Command word(s) a player types to invoke this archon skill. */
private static final String[] triggerStrings = I(new String[] { "METACRAFT" });

/** @return the command trigger words for this skill ("METACRAFT"). */
@Override
public String[] triggerStrings() {
    return triggerStrings;
}
// Shared, lazily-populated cache of all ItemCraftor skills, ordered by lowest
// qualifying level (filled on first use in invoke()).
// NOTE(review): public mutable static filled without synchronization — assumed
// single-threaded command handling; confirm before concurrent use.
public static List<Ability> craftingSkills = new Vector<Ability>();

/**
 * Substitutes the first '%' marker in a recipe-name template.
 * When the replacement is empty, one adjacent space is swallowed along with
 * the marker ("% " or " %") so no double space remains; otherwise the bare
 * '%' is replaced by the given text.
 *
 * @param thisStr  the template string, possibly containing a '%' marker
 * @param withThis the replacement text (may be empty)
 * @return the substituted string, or the original if no marker is present
 */
protected String replacePercent(String thisStr, String withThis)
{
    if(withThis.length()==0)
    {
        // Removing the marker entirely: prefer forms that also eat one space.
        for(final String token : new String[] { "% ", " %", "%" })
        {
            final int at = thisStr.indexOf(token);
            if(at >= 0)
                return thisStr.substring(0, at) + thisStr.substring(at + token.length());
        }
    }
    else
    {
        final int at = thisStr.indexOf('%');
        if(at >= 0)
            return thisStr.substring(0, at) + withThis + thisStr.substring(at + 1);
    }
    return thisStr;
}
/**
 * Handles the METACRAFT archon command.
 * Syntax (from the usage message below): a single recipe name, "everything",
 * "every [recipe]", or "all [skill name]"; optionally followed by a raw
 * material name; optionally "to self" (default), "to here", or
 * "to file [FILENAME]" (XML export).
 *
 * @param mob         the archon issuing the command
 * @param commands    the parsed command words (without the trigger word)
 * @param givenTarget unused here
 * @param auto        unused here
 * @param asLevel     unused here
 * @return true if at least one item was crafted/exported, false on any failure
 */
@Override
public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
{
    // Lazily populate the shared skill cache: copy every ItemCraftor ability,
    // then repeatedly extract the one with the lowest qualifying level so the
    // cache ends up sorted by level (selection-sort over the temp vector).
    if(craftingSkills.size()==0)
    {
        final Vector<Ability> V=new Vector<Ability>();
        for(final Enumeration<Ability> e=CMClass.abilities();e.hasMoreElements();)
        {
            final Ability A=e.nextElement();
            if(A instanceof ItemCraftor)
                V.addElement((Ability)A.copyOf());
        }
        while(V.size()>0)
        {
            int lowest=Integer.MAX_VALUE;
            Ability lowestA=null;
            for(int i=0;i<V.size();i++)
            {
                final Ability A=V.elementAt(i);
                final int ii=CMLib.ableMapper().lowestQualifyingLevel(A.ID());
                if(ii<lowest)
                {
                    lowest=ii;
                    lowestA=A;
                }
            }
            if(lowestA==null)
                lowestA=V.firstElement();
            if(lowestA!=null)
            {
                V.removeElement(lowestA);
                craftingSkills.add(lowestA);
            }
            else
                break;
        }
    }
    if(commands.size()<1)
    {
        mob.tell(L("Metacraft what ([recipe], everything, every [recipe], all [skill name]), (optionally) out of what material, and (optionally) to self, to here, or to file [FILENAME]?"));
        return false;
    }
    // Parse the optional "to self|here|file X" destination clause, removing
    // the consumed words from the command list.
    String mat=null;
    String toWHERE = "SELF";
    if(commands.size()>1)
    {
        for(int x=1;x<commands.size()-1;x++)
        {
            if(commands.get(x).equalsIgnoreCase("to"))
            {
                if(commands.get(x+1).equalsIgnoreCase("self"))
                {
                    toWHERE="SELF";
                    commands.remove(x);
                    commands.remove(x);
                    break;
                }
                if(commands.get(x+1).equalsIgnoreCase("here"))
                {
                    toWHERE="HERE";
                    commands.remove(x);
                    commands.remove(x);
                    break;
                }
                if(commands.get(x+1).equalsIgnoreCase("file")&&(x<commands.size()-2))
                {
                    toWHERE=commands.get(x+2);
                    commands.remove(x);
                    commands.remove(x);
                    commands.remove(x);
                    break;
                }
            }
        }
        // The trailing word (if any words remain beyond the recipe) names
        // the raw material to craft from.
        if(commands.size()>1)
        {
            mat=(commands.get(commands.size()-1)).toUpperCase();
            commands.remove(commands.size()-1);
        }
    }
    int material=-1;
    if(mat!=null)
        material=RawMaterial.CODES.FIND_StartsWith(mat);
    if((mat!=null)&&(material<0))
    {
        mob.tell(L("'@x1' is not a recognized material.",mat));
        return false;
    }
    // Resolve which skills (and which recipe per skill) to run. The second
    // pair element is either a concrete recipe name or "*" for "all recipes".
    ItemCraftor skill=null;
    String recipe=CMParms.combine(commands,0);
    List<Pair<Ability,String>> skillsToUse=new Vector<Pair<Ability,String>>();
    boolean everyFlag=false;
    if(recipe.equalsIgnoreCase("everything"))
    {
        // Every recipe of every crafting skill.
        skillsToUse=new XVector<Pair<Ability,String>>();
        for(Ability A : craftingSkills)
            skillsToUse.add(new Pair<Ability,String>(A,"*"));
        everyFlag=true;
        recipe=null;
    }
    else
    if(recipe.toUpperCase().startsWith("EVERY "))
    {
        // Every recipe whose name matches; exact match first, then loose.
        everyFlag=true;
        recipe=recipe.substring(6).trim();
        for(int i=0;i<craftingSkills.size();i++)
        {
            skill=(ItemCraftor)craftingSkills.get(i);
            final List<List<String>> V=skill.matchingRecipeNames(recipe,false);
            if((V!=null)&&(V.size()>0))
            {
                for(List<String> V2 : V)
                    skillsToUse.add(new Pair<Ability,String>(skill,replacePercent(V2.get(0),"")));
            }
        }
        if(skillsToUse.size()==0)
        {
            for(int i=0;i<craftingSkills.size();i++)
            {
                skill=(ItemCraftor)craftingSkills.get(i);
                final List<List<String>> V=skill.matchingRecipeNames(recipe,true);
                if((V!=null)&&(V.size()>0))
                {
                    for(List<String> V2 : V)
                        skillsToUse.add(new Pair<Ability,String>(skill,replacePercent(V2.get(0),"")));
                }
            }
        }
    }
    else
    if(recipe.toUpperCase().startsWith("ALL "))
    {
        // Every recipe of one named crafting skill; exact name match first.
        everyFlag=true;
        String skillName=recipe.toUpperCase().substring(4);
        skill = (ItemCraftor)CMLib.english().fetchEnvironmental(craftingSkills, skillName, true);
        if(skill == null)
            skill = (ItemCraftor)CMLib.english().fetchEnvironmental(craftingSkills, skillName, false);
        if(skill == null)
        {
            mob.tell(L("'@x1' is not a known crafting skill.",recipe));
            return false;
        }
        skillsToUse=new XVector<Pair<Ability,String>>();
        skillsToUse.add(new Pair<Ability,String>(skill,"*"));
        recipe=null;
    }
    else
    {
        // A single recipe name: first matching recipe per skill; exact
        // match across all skills first, then loose.
        for(int i=0;i<craftingSkills.size();i++)
        {
            skill=(ItemCraftor)craftingSkills.get(i);
            final List<List<String>> V=skill.matchingRecipeNames(recipe,false);
            if((V!=null)&&(V.size()>0))
                skillsToUse.add(new Pair<Ability,String>(skill,this.replacePercent(V.get(0).get(0), "")));
        }
        if(skillsToUse.size()==0)
        {
            for(int i=0;i<craftingSkills.size();i++)
            {
                skill=(ItemCraftor)craftingSkills.get(i);
                final List<List<String>> V=skill.matchingRecipeNames(recipe,true);
                if((V!=null)&&(V.size()>0))
                    skillsToUse.add(new Pair<Ability,String>(skill,this.replacePercent(V.get(0).get(0), "")));
            }
        }
    }
    if(skillsToUse.size()==0)
    {
        mob.tell(L("'@x1' can not be made with any of the known crafting skills.",recipe));
        return false;
    }
    // Craft phase: run each selected (skill, recipe) pair, collecting items
    // (and their keys, when craftAllItemSets produces key pairs).
    boolean success=false;
    final StringBuffer xml = new StringBuffer("<ITEMS>");
    final HashSet<String> files = new HashSet<String>();
    for(int s=0;s<skillsToUse.size();s++)
    {
        skill=(ItemCraftor)skillsToUse.get(s).first;
        final String recipeName = skillsToUse.get(s).second;
        final List<Item> items=new Vector<Item>();
        if(everyFlag)
        {
            if(recipe==null)
            {
                // "*" recipe: craft every item set this skill knows.
                List<ItemCraftor.ItemKeyPair> V=null;
                if(material>=0)
                    V=skill.craftAllItemSets(material,false);
                else
                    V=skill.craftAllItemSets(false);
                if(V!=null)
                {
                    for(final ItemCraftor.ItemKeyPair L: V)
                    {
                        items.add(L.item);
                        if(L.key!=null)
                            items.add(L.key);
                    }
                }
            }
            else
            if(material>=0)
            {
                final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName,material,false);
                if(pair!=null)
                    items.addAll(pair.asList());
            }
            else
            {
                final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName);
                if(pair!=null)
                    items.addAll(pair.asList());
            }
        }
        else
        if(material>=0)
        {
            final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName,material,false);
            if(pair!=null)
                items.addAll(pair.asList());
        }
        else
        {
            final ItemCraftor.ItemKeyPair pair = skill.craftItem(recipeName);
            if(pair!=null)
                items.addAll(pair.asList());
        }
        if(items.size()==0)
            continue;
        success=true;
        // Delivery phase: drop in room / give to mob, or accumulate XML.
        if(toWHERE.equals("SELF")||toWHERE.equals("HERE"))
        {
            for(final Item building : items)
            {
                // Clan items get re-branded with the archon's privileged clan.
                if(building instanceof ClanItem)
                {
                    final Pair<Clan,Integer> p=CMLib.clans().findPrivilegedClan(mob, Clan.Function.ENCHANT);
                    if(p!=null)
                    {
                        final Clan C=p.first;
                        final String clanName=(" "+C.getGovernmentName()+" "+C.name());
                        building.setName(CMStrings.replaceFirst(building.Name(), " Clan None", clanName));
                        building.setDisplayText(CMStrings.replaceFirst(building.displayText(), " Clan None", clanName));
                        building.setDescription(CMStrings.replaceFirst(building.description(), " Clan None", clanName));
                        ((ClanItem)building).setClanID(C.clanID());
                    }
                }
                if(toWHERE.equals("HERE"))
                {
                    mob.location().addItem(building,ItemPossessor.Expire.Player_Drop);
                    mob.location().show(mob,null,null,CMMsg.MSG_OK_ACTION,L("@x1 appears here.",building.name()));
                }
                else
                {
                    mob.moveItemTo(building);
                    mob.location().show(mob,null,null,CMMsg.MSG_OK_ACTION,L("@x1 appears in <S-YOUPOSS> hands.",building.name()));
                }
            }
        }
        else
            xml.append(CMLib.coffeeMaker().getItemsXML(items,new Hashtable<String,List<Item>>(),files,null));
        mob.location().recoverPhyStats();
        if(!everyFlag)
            break;
    }
    // File export: wrap accumulated item XML, plus any referenced resource
    // files, and save to the named CMFile.
    if(success
    &&(!toWHERE.equals("SELF"))
    &&(!toWHERE.equals("HERE")))
    {
        final CMFile file = new CMFile(toWHERE,mob);
        if(!file.canWrite())
            mob.tell(L("Unable to open file '@x1' for writing.",toWHERE));
        else
        {
            xml.append("</ITEMS>");
            if(files.size()>0)
            {
                final StringBuffer str=new StringBuffer("<FILES>");
                for(final Iterator<String> i=files.iterator();i.hasNext();)
                {
                    final String filename=i.next();
                    final StringBuffer buf=new CMFile(Resources.makeFileResourceName(filename),null,CMFile.FLAG_LOGERRORS).text();
                    if((buf!=null)&&(buf.length()>0))
                    {
                        str.append("<FILE NAME=\""+filename+"\">");
                        str.append(buf);
                        str.append("</FILE>");
                    }
                }
                str.append("</FILES>");
                xml.append(str);
            }
            file.saveText(xml);
            mob.tell(L("File '@x1' written.",file.getAbsolutePath()));
        }
    }
    if(!success)
    {
        mob.tell(L("The metacraft failed."));
        return false;
    }
    return true;
}
}
| Metacraft is much better now.
git-svn-id: 0cdf8356e41b2d8ccbb41bb76c82068fe80b2514@15584 0d6f1817-ed0e-0410-87c9-987e46238f29
| com/planet_ink/coffee_mud/Abilities/Archon/Archon_Metacraft.java | Metacraft is much better now. |
|
Java | apache-2.0 | 16d466d301ac01c92584ab7d930e4158b2739221 | 0 | rjainqb/jets3t-rj,rjainqb/jets3t-rj | /*
* jets3t : Java Extra-Tasty S3 Toolkit (for Amazon S3 online storage service)
* This is a java.net project, see https://jets3t.dev.java.net/
*
* Copyright 2008 James Murty
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jets3t.apps.cockpit;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Rectangle;
import java.awt.datatransfer.DataFlavor;
import java.awt.dnd.DnDConstants;
import java.awt.dnd.DropTarget;
import java.awt.dnd.DropTargetDragEvent;
import java.awt.dnd.DropTargetDropEvent;
import java.awt.dnd.DropTargetEvent;
import java.awt.dnd.DropTargetListener;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.swing.JApplet;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JSplitPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.TableColumn;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.NTCredentials;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScheme;
import org.apache.commons.httpclient.auth.CredentialsNotAvailableException;
import org.apache.commons.httpclient.auth.CredentialsProvider;
import org.apache.commons.httpclient.auth.NTLMScheme;
import org.apache.commons.httpclient.auth.RFC2617Scheme;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jets3t.apps.cockpit.gui.AccessControlDialog;
import org.jets3t.apps.cockpit.gui.BucketLoggingDialog;
import org.jets3t.apps.cockpit.gui.BucketTableModel;
import org.jets3t.apps.cockpit.gui.CreateBucketDialog;
import org.jets3t.apps.cockpit.gui.ObjectTableModel;
import org.jets3t.apps.cockpit.gui.PreferencesDialog;
import org.jets3t.apps.cockpit.gui.RequesterPaysDialog;
import org.jets3t.apps.cockpit.gui.SignedGetUrlDialog;
import org.jets3t.apps.cockpit.gui.StartupDialog;
import org.jets3t.gui.AuthenticationDialog;
import org.jets3t.gui.CopyObjectsDialog;
import org.jets3t.gui.ErrorDialog;
import org.jets3t.gui.GuiUtils;
import org.jets3t.gui.HyperlinkActivatedListener;
import org.jets3t.gui.ItemPropertiesDialog;
import org.jets3t.gui.JHtmlLabel;
import org.jets3t.gui.ManageDistributionsDialog;
import org.jets3t.gui.ObjectsAttributesDialog;
import org.jets3t.gui.ProgressDialog;
import org.jets3t.gui.TableSorter;
import org.jets3t.gui.skins.SkinsFactory;
import org.jets3t.service.CloudFrontService;
import org.jets3t.service.CloudFrontServiceException;
import org.jets3t.service.Constants;
import org.jets3t.service.Jets3tProperties;
import org.jets3t.service.S3ObjectsChunk;
import org.jets3t.service.S3Service;
import org.jets3t.service.S3ServiceException;
import org.jets3t.service.acl.AccessControlList;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.io.BytesProgressWatcher;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.model.S3Object;
import org.jets3t.service.model.cloudfront.Distribution;
import org.jets3t.service.multithread.CancelEventTrigger;
import org.jets3t.service.multithread.CopyObjectsEvent;
import org.jets3t.service.multithread.CreateBucketsEvent;
import org.jets3t.service.multithread.CreateObjectsEvent;
import org.jets3t.service.multithread.DeleteObjectsEvent;
import org.jets3t.service.multithread.DownloadObjectsEvent;
import org.jets3t.service.multithread.DownloadPackage;
import org.jets3t.service.multithread.GetObjectHeadsEvent;
import org.jets3t.service.multithread.GetObjectsEvent;
import org.jets3t.service.multithread.ListObjectsEvent;
import org.jets3t.service.multithread.LookupACLEvent;
import org.jets3t.service.multithread.S3ServiceEventListener;
import org.jets3t.service.multithread.S3ServiceMulti;
import org.jets3t.service.multithread.ServiceEvent;
import org.jets3t.service.multithread.ThreadWatcher;
import org.jets3t.service.multithread.UpdateACLEvent;
import org.jets3t.service.security.AWSCredentials;
import org.jets3t.service.security.EncryptionUtil;
import org.jets3t.service.utils.ByteFormatter;
import org.jets3t.service.utils.FileComparer;
import org.jets3t.service.utils.FileComparerResults;
import org.jets3t.service.utils.Mimetypes;
import org.jets3t.service.utils.ObjectUtils;
import org.jets3t.service.utils.TimeFormatter;
import com.centerkey.utils.BareBonesBrowserLaunch;
/**
* Cockpit is a graphical Java application for viewing and managing the contents of an Amazon S3 account.
* For more information and help please see the
* <a href="http://jets3t.s3.amazonaws.com/applications/cockpit.html">Cockpit Guide</a>.
* <p>
* This is the Cockpit application class; it may be run as a stand-alone application or as an Applet.
*
* @author jmurty
*/
public class Cockpit extends JApplet implements S3ServiceEventListener, ActionListener,
ListSelectionListener, HyperlinkActivatedListener, CredentialsProvider
{
private static final long serialVersionUID = 1275456909864052884L;

private static final Log log = LogFactory.getLog(Cockpit.class);

// External help/documentation URLs opened from the Help menu.
public static final String JETS3T_COCKPIT_HELP_PAGE = "http://jets3t.s3.amazonaws.com/applications/cockpit.html";
public static final String AMAZON_S3_PAGE = "http://www.amazon.com/s3";

// User-Agent style application identifier and window title.
public static final String APPLICATION_DESCRIPTION = "Cockpit/0.7.0";

public static final String APPLICATION_TITLE = "JetS3t Cockpit";

private static final int BUCKET_LIST_CHUNKING_SIZE = 1000;

// Directory holding mime.types, jets3t.properties and the user preferences file.
private File cockpitHomeDirectory = Constants.DEFAULT_PREFERENCES_DIRECTORY;

private CockpitPreferences cockpitPreferences = null;

private final Insets insetsZero = new Insets(0, 0, 0, 0);
private final Insets insetsDefault = new Insets(5, 7, 5, 7);

private final ByteFormatter byteFormatter = new ByteFormatter();
private final TimeFormatter timeFormatter = new TimeFormatter();
// NOTE(review): SimpleDateFormat is not thread-safe; assumed these are only
// touched on the Swing event-dispatch thread — confirm.
private final SimpleDateFormat yearAndTimeSDF = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private final SimpleDateFormat timeSDF = new SimpleDateFormat("HH:mm:ss");

private final GuiUtils guiUtils = new GuiUtils();

/**
 * Multi-threaded S3 service used by the application.
 */
private S3ServiceMulti s3ServiceMulti = null;

private CloudFrontService cloudFrontService = null;

// Frame that owns all modal dialogs; found or created in init().
private Frame ownerFrame = null;
private boolean isStandAloneApplication = false;

// Service main menu items
private JMenuItem loginMenuItem = null;
private JMenuItem logoutMenuItem = null;

// Bucket main menu items
private JPopupMenu bucketActionMenu = null;
private JMenuItem viewBucketPropertiesMenuItem = null;
private JMenuItem refreshBucketMenuItem = null;
private JMenuItem createBucketMenuItem = null;
private JMenuItem manageDistributionsMenuItem = null;
private JMenuItem updateBucketACLMenuItem = null;
private JMenuItem updateBucketRequesterPaysStatusMenuItem = null;
private JMenuItem deleteBucketMenuItem = null;

// Object main menu items
private JPopupMenu objectActionMenu = null;
private JMenuItem refreshObjectMenuItem = null;
private JMenuItem viewOrModifyObjectAttributesMenuItem = null;
private JMenuItem copyObjectsMenuItem = null;
private JMenuItem updateObjectACLMenuItem = null;
private JMenuItem downloadObjectMenuItem = null;
private JMenuItem uploadFilesMenuItem = null;
private JMenuItem generatePublicGetUrls = null;
private JMenuItem generateTorrentUrl = null;
private JMenuItem deleteObjectMenuItem = null;

// Tools menu items.
private JMenuItem bucketLoggingMenuItem = null;

// Preference menu items.
private JMenuItem preferencesDialogMenuItem = null;

// Help menu items.
private JMenuItem cockpitHelpMenuItem = null;
private JMenuItem amazonS3HelpMenuItem = null;

// Tables
private JTable bucketsTable = null;
private JTable objectsTable = null;
private JScrollPane objectsTableSP = null;
private BucketTableModel bucketTableModel = null;
private TableSorter bucketTableModelSorter = null;
private ObjectTableModel objectTableModel = null;
private TableSorter objectTableModelSorter = null;

private JLabel objectsSummaryLabel = null;

// NOTE(review): raw HashMap; presumably maps bucket name -> cached object
// listing — confirm against usage elsewhere in this class.
private HashMap cachedBuckets = new HashMap();

private ProgressDialog progressDialog = null;

private ObjectsAttributesDialog objectsAttributesDialog = null;

private File downloadDirectory = null;
private File fileChoosersLastUploadDirectory = null;

// Controls for the optional prefix/delimiter object-listing filter.
private JPanel filterObjectsPanel = null;
private JCheckBox filterObjectsCheckBox = null;
private JTextField filterObjectsPrefix = null;
private JComboBox filterObjectsDelimiter = null;

// File comparison options
private static final String UPLOAD_NEW_FILES_ONLY = "Only upload new files";
private static final String UPLOAD_NEW_AND_CHANGED_FILES = "Upload new and changed files";
private static final String UPLOAD_ALL_FILES = "Upload all files";
private static final String DOWNLOAD_NEW_FILES_ONLY = "Only download new files";
private static final String DOWNLOAD_NEW_AND_CHANGED_FILES = "Download new and changed files";
private static final String DOWNLOAD_ALL_FILES = "Download all files";

private EncryptionUtil encryptionUtil = null;

private Jets3tProperties cockpitProperties = null;

private SkinsFactory skinsFactory = null;
/**
 * Constructor to run this application as an Applet; intentionally empty, as
 * all setup is performed in the applet lifecycle method {@link #init()}.
 */
public Cockpit() {
}
/**
 * Constructor to run this application in a stand-alone window.
 *
 * @param ownerFrame the frame the application will be displayed in
 * @throws S3ServiceException if the anonymous S3 service cannot be started
 */
public Cockpit(JFrame ownerFrame) throws S3ServiceException {
    this.ownerFrame = ownerFrame;
    isStandAloneApplication = true;
    // NOTE(review): init() is an overridable applet lifecycle method being
    // called from a constructor; it also sizes this component, which the
    // frame bounds below depend on — order matters here.
    init();

    ownerFrame.getContentPane().add(this);
    ownerFrame.setBounds(this.getBounds());
    ownerFrame.setVisible(true);
}
/**
 * Prepares application to run as a GUI by finding/creating a root owner JFrame, creating an
 * un-authenticated {@link RestS3Service} and loading properties files.
 * Configuration is read from the cockpit home directory: mime.types,
 * jets3t.properties, and the user's preferences file. Finally a login prompt
 * is queued on the event-dispatch thread.
 */
public void init() {
    super.init();

    // Find or create a Frame to own modal dialog boxes.
    if (this.ownerFrame == null) {
        // Walk up the component hierarchy looking for an enclosing Frame
        // (the browser's frame when running as an applet).
        Component c = this;
        while (!(c instanceof Frame) && c.getParent() != null) {
            c = c.getParent();
        }
        if (!(c instanceof Frame)) {
            this.ownerFrame = new JFrame();
        } else {
            this.ownerFrame = (Frame) c;
        }
    }

    // Initialise the GUI.
    initGui();

    // Initialise a non-authenticated service.
    try {
        // Revert to anonymous service.
        s3ServiceMulti = new S3ServiceMulti(
            new RestS3Service(null, APPLICATION_DESCRIPTION, this), this);
        cloudFrontService = null;
    } catch (S3ServiceException e) {
        String message = "Unable to start anonymous service";
        log.error(message, e);

        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }

    // Load Cockpit configuration files from cockpit's home directory.
    File mimeTypesFile = new File(cockpitHomeDirectory, "mime.types");
    if (mimeTypesFile.exists()) {
        try {
            // Replace the default mime-type mappings with the user's own.
            Mimetypes.getInstance().loadAndReplaceMimetypes(
                new FileInputStream(mimeTypesFile));
        } catch (IOException e) {
            String message = "Unable to load mime.types file: " + mimeTypesFile;
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    }
    File jets3tPropertiesFile = new File(cockpitHomeDirectory, "jets3t.properties");
    if (jets3tPropertiesFile.exists()) {
        try {
            Jets3tProperties.getInstance(Constants.JETS3T_PROPERTIES_FILENAME)
                .loadAndReplaceProperties(new FileInputStream(jets3tPropertiesFile),
                    "jets3t.properties in Cockpit's home folder " + cockpitHomeDirectory);
        } catch (IOException e) {
            String message = "Unable to load jets3t.properties file: " + jets3tPropertiesFile;
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    }

    // Initialise the user's preferences.
    this.cockpitPreferences = new CockpitPreferences();
    File cockpitPreferencesPropertiesFile = new File(cockpitHomeDirectory, Constants.COCKPIT_PROPERTIES_FILENAME);
    if (cockpitPreferencesPropertiesFile.exists()) {
        try {
            Properties properties = new Properties();
            properties.load(new FileInputStream(cockpitPreferencesPropertiesFile));
            this.cockpitPreferences.fromProperties(properties);
        } catch (IOException e) {
            String message = "Unable to load your preferences";
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    }

    cockpitProperties = Jets3tProperties.getInstance(Constants.JETS3T_PROPERTIES_FILENAME);
    skinsFactory = SkinsFactory.getInstance(cockpitProperties.getProperties());

    // Prompt for login once the GUI is fully realised on the EDT.
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            loginEvent();
        }
    });
}
/**
 * Initialises the application's GUI elements: the menus, the buckets table
 * (left), the objects table with its optional prefix/delimiter filter panel
 * (right), a horizontal split pane combining the two, and the drag-and-drop
 * target for uploads (initially inactive until a bucket is selected).
 */
private void initGui() {
    initMenus();

    JPanel appContent = new JPanel(new GridBagLayout());
    this.getContentPane().add(appContent);

    // Buckets panel.
    JPanel bucketsPanel = new JPanel(new GridBagLayout());

    JButton bucketActionButton = new JButton();
    bucketActionButton.setToolTipText("Bucket actions menu");
    guiUtils.applyIcon(bucketActionButton, "/images/nuvola/16x16/actions/misc.png");
    bucketActionButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            // Pop the bucket actions menu up directly below the button.
            JButton sourceButton = (JButton) e.getSource();
            bucketActionMenu.show(sourceButton, 0, sourceButton.getHeight());
        }
    });

    bucketsPanel.add(new JHtmlLabel("<html><b>Buckets</b></html>", this),
        new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));
    bucketsPanel.add(bucketActionButton,
        new GridBagConstraints(1, 0, 1, 1, 0, 0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));

    bucketTableModel = new BucketTableModel();
    bucketTableModelSorter = new TableSorter(bucketTableModel);
    bucketsTable = new JTable(bucketTableModelSorter);
    bucketTableModelSorter.setTableHeader(bucketsTable.getTableHeader());
    bucketsTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    bucketsTable.getSelectionModel().addListSelectionListener(this);
    bucketsTable.setShowHorizontalLines(true);
    bucketsTable.setShowVerticalLines(false);

    // Set column width for Cloud Front distributions indicator.
    TableColumn distributionFlagColumn = bucketsTable.getColumnModel().getColumn(1);
    int distributionFlagColumnWidth = 18;
    distributionFlagColumn.setPreferredWidth(distributionFlagColumnWidth);
    distributionFlagColumn.setMaxWidth(distributionFlagColumnWidth);
    distributionFlagColumn.setMinWidth(0);

    bucketsTable.addMouseListener(new ContextMenuListener());
    bucketsPanel.add(new JScrollPane(bucketsTable),
        new GridBagConstraints(0, 1, 2, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, insetsZero, 0, 0));
    bucketsPanel.add(new JLabel(" "),
        new GridBagConstraints(0, 2, 2, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsDefault, 0, 0));

    // Filter panel: prefix text field and delimiter combo; hidden until the
    // "Filter objects" checkbox is ticked.
    filterObjectsPanel = new JPanel(new GridBagLayout());
    filterObjectsPrefix = new JTextField();
    filterObjectsPrefix.setToolTipText("Only show objects with this prefix");
    filterObjectsPrefix.addActionListener(this);
    filterObjectsPrefix.setActionCommand("RefreshObjects");
    filterObjectsDelimiter = new JComboBox(new String[] {"", "/", "?", "\\"});
    filterObjectsDelimiter.setEditable(true);
    filterObjectsDelimiter.setToolTipText("Object name delimiter");
    filterObjectsDelimiter.addActionListener(this);
    filterObjectsDelimiter.setActionCommand("RefreshObjects");
    filterObjectsPanel.add(new JHtmlLabel("Prefix:", this),
        new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsZero, 0, 0));
    filterObjectsPanel.add(filterObjectsPrefix,
        new GridBagConstraints(1, 0, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsDefault, 0, 0));
    filterObjectsPanel.add(new JHtmlLabel("Delimiter:", this),
        new GridBagConstraints(2, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsDefault, 0, 0));
    filterObjectsPanel.add(filterObjectsDelimiter,
        new GridBagConstraints(3, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsZero, 0, 0));
    filterObjectsPanel.setVisible(false);

    // Objects panel.
    JPanel objectsPanel = new JPanel(new GridBagLayout());
    int row = 0;
    filterObjectsCheckBox = new JCheckBox("Filter objects");
    filterObjectsCheckBox.addActionListener(this);
    filterObjectsCheckBox.setToolTipText("Check this option to filter the objects listed");
    objectsPanel.add(new JHtmlLabel("<html><b>Objects</b></html>", this),
        new GridBagConstraints(0, row, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));
    objectsPanel.add(filterObjectsCheckBox,
        new GridBagConstraints(1, row, 1, 1, 0, 0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));

    JButton objectActionButton = new JButton();
    objectActionButton.setToolTipText("Object actions menu");
    guiUtils.applyIcon(objectActionButton, "/images/nuvola/16x16/actions/misc.png");
    objectActionButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            // Pop the object actions menu up directly below the button.
            JButton sourceButton = (JButton) e.getSource();
            objectActionMenu.show(sourceButton, 0, sourceButton.getHeight());
        }
    });
    objectsPanel.add(objectActionButton,
        new GridBagConstraints(2, row, 1, 1, 0, 0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));

    objectsPanel.add(filterObjectsPanel,
        new GridBagConstraints(0, ++row, 3, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));

    objectsTable = new JTable();
    objectTableModel = new ObjectTableModel();
    objectTableModelSorter = new TableSorter(objectTableModel);
    objectTableModelSorter.setTableHeader(objectsTable.getTableHeader());
    objectsTable.setModel(objectTableModelSorter);
    // Render Long cells (object sizes) as human-readable byte counts.
    objectsTable.setDefaultRenderer(Long.class, new DefaultTableCellRenderer() {
        private static final long serialVersionUID = 301092191828910402L;

        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
            String formattedSize = byteFormatter.formatByteSize(((Long)value).longValue());
            return super.getTableCellRendererComponent(table, formattedSize, isSelected, hasFocus, row, column);
        }
    });
    // Render Date cells (last-modified times) as "yyyy-MM-dd HH:mm:ss".
    objectsTable.setDefaultRenderer(Date.class, new DefaultTableCellRenderer() {
        private static final long serialVersionUID = 7285511556343895652L;

        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
            Date date = (Date) value;
            return super.getTableCellRendererComponent(table, yearAndTimeSDF.format(date), isSelected, hasFocus, row, column);
        }
    });
    objectsTable.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
    objectsTable.getSelectionModel().addListSelectionListener(this);
    objectsTable.setShowHorizontalLines(true);
    objectsTable.setShowVerticalLines(true);
    objectsTable.addMouseListener(new ContextMenuListener());
    objectsTableSP = new JScrollPane(objectsTable);
    objectsPanel.add(objectsTableSP,
            new GridBagConstraints(0, ++row, 3, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, insetsZero, 0, 0));
    objectsSummaryLabel = new JHtmlLabel("Please select a bucket", this);
    objectsSummaryLabel.setHorizontalAlignment(JLabel.CENTER);
    objectsSummaryLabel.setFocusable(false);
    objectsPanel.add(objectsSummaryLabel,
            new GridBagConstraints(0, ++row, 3, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, insetsDefault, 0, 0));

    // Combine sections.
    JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT,
        bucketsPanel, objectsPanel);
    splitPane.setOneTouchExpandable(true);
    splitPane.setContinuousLayout(true);
    appContent.add(splitPane,
        new GridBagConstraints(0, 0, 1, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, insetsDefault, 0, 0));

    // Set preferred sizes
    int preferredWidth = 800;
    int preferredHeight = 600;
    this.setBounds(new Rectangle(new Dimension(preferredWidth, preferredHeight)));

    splitPane.setResizeWeight(0.30);

    // Initialize drop target; disabled until a bucket is selected.
    initDropTarget(new JComponent[] {objectsTableSP, objectsTable} );
    objectsTable.getDropTarget().setActive(false);
    objectsTableSP.getDropTarget().setActive(false);
}
/**
 * Initialise the application's menu bar: the Service, Tools and Help menus,
 * plus the Bucket and Object context (popup) menus.
 * <p>
 * All action-driven items are wired to this application's
 * {@link #actionPerformed} handler via their action command strings. Items
 * start in the logged-out state: only "Log in...", the preferences item and
 * the help items are enabled until a login succeeds.
 */
private void initMenus() {
    JMenuBar appMenuBar = new JMenuBar();
    this.setJMenuBar(appMenuBar);

    // Service menu
    JMenu serviceMenu = new JMenu("Service");
    loginMenuItem = createMenuItem("Log in...", "LoginEvent",
        "/images/nuvola/16x16/actions/connect_creating.png");
    serviceMenu.add(loginMenuItem);
    logoutMenuItem = createMenuItem("Log out", "LogoutEvent",
        "/images/nuvola/16x16/actions/connect_no.png");
    serviceMenu.add(logoutMenuItem);
    if (isStandAloneApplication) {
        // A Quit item only makes sense when not running as an applet.
        serviceMenu.add(new JSeparator());
        serviceMenu.add(createMenuItem("Quit", "QuitEvent",
            "/images/nuvola/16x16/actions/exit.png"));
    }
    loginMenuItem.setEnabled(true);
    logoutMenuItem.setEnabled(false);

    // Bucket action (popup) menu.
    bucketActionMenu = new JPopupMenu();
    refreshBucketMenuItem = createMenuItem("Refresh bucket listing", "RefreshBuckets",
        "/images/nuvola/16x16/actions/reload.png");
    bucketActionMenu.add(refreshBucketMenuItem);
    viewBucketPropertiesMenuItem = createMenuItem("View bucket properties...", "ViewBucketProperties",
        "/images/nuvola/16x16/actions/viewmag.png");
    bucketActionMenu.add(viewBucketPropertiesMenuItem);
    updateBucketACLMenuItem = createMenuItem("Update bucket's Access Control List...", "UpdateBucketACL",
        "/images/nuvola/16x16/actions/encrypted.png");
    bucketActionMenu.add(updateBucketACLMenuItem);
    updateBucketRequesterPaysStatusMenuItem = createMenuItem("Update bucket's Requester Pays status...",
        "UpdateBucketRequesterPaysStatus", "/images/nuvola/16x16/actions/identity.png");
    bucketActionMenu.add(updateBucketRequesterPaysStatusMenuItem);
    bucketActionMenu.add(new JSeparator());
    createBucketMenuItem = createMenuItem("Create new bucket...", "CreateBucket",
        "/images/nuvola/16x16/actions/viewmag+.png");
    bucketActionMenu.add(createBucketMenuItem);
    bucketActionMenu.add(createMenuItem("Add third-party bucket...", "AddThirdPartyBucket",
        "/images/nuvola/16x16/actions/viewmagfit.png"));
    bucketActionMenu.add(new JSeparator());
    deleteBucketMenuItem = createMenuItem("Delete bucket...", "DeleteBucket",
        "/images/nuvola/16x16/actions/cancel.png");
    bucketActionMenu.add(deleteBucketMenuItem);
    // Bucket actions stay disabled until the user logs in / selects a bucket.
    viewBucketPropertiesMenuItem.setEnabled(false);
    refreshBucketMenuItem.setEnabled(false);
    createBucketMenuItem.setEnabled(false);
    updateBucketACLMenuItem.setEnabled(false);
    updateBucketRequesterPaysStatusMenuItem.setEnabled(false);
    deleteBucketMenuItem.setEnabled(false);

    // Object action (popup) menu.
    objectActionMenu = new JPopupMenu();
    refreshObjectMenuItem = createMenuItem("Refresh object listing", "RefreshObjects",
        "/images/nuvola/16x16/actions/reload.png");
    objectActionMenu.add(refreshObjectMenuItem);
    viewOrModifyObjectAttributesMenuItem = createMenuItem("View or Modify object attributes...",
        "ViewOrModifyObjectAttributes", "/images/nuvola/16x16/actions/viewmag.png");
    objectActionMenu.add(viewOrModifyObjectAttributesMenuItem);
    copyObjectsMenuItem = createMenuItem("Copy or Move objects...", "CopyObjects",
        "/images/nuvola/16x16/actions/filenew.png");
    objectActionMenu.add(copyObjectsMenuItem);
    updateObjectACLMenuItem = createMenuItem("View or Modify Access Control Lists...", "UpdateObjectACL",
        "/images/nuvola/16x16/actions/encrypted.png");
    objectActionMenu.add(updateObjectACLMenuItem);
    downloadObjectMenuItem = createMenuItem("Download objects...", "DownloadObjects",
        "/images/nuvola/16x16/actions/1downarrow.png");
    objectActionMenu.add(downloadObjectMenuItem);
    uploadFilesMenuItem = createMenuItem("Upload files...", "UploadFiles",
        "/images/nuvola/16x16/actions/1uparrow.png");
    objectActionMenu.add(uploadFilesMenuItem);
    objectActionMenu.add(new JSeparator());
    generatePublicGetUrls = createMenuItem("Generate Public GET URLs...", "GeneratePublicGetURLs",
        "/images/nuvola/16x16/actions/wizard.png");
    objectActionMenu.add(generatePublicGetUrls);
    generateTorrentUrl = createMenuItem("Generate Torrent URL...", "GenerateTorrentURL",
        "/images/nuvola/16x16/actions/wizard.png");
    objectActionMenu.add(generateTorrentUrl);
    objectActionMenu.add(new JSeparator());
    deleteObjectMenuItem = createMenuItem("Delete objects...", "DeleteObjects",
        "/images/nuvola/16x16/actions/cancel.png");
    objectActionMenu.add(deleteObjectMenuItem);
    // Object actions stay disabled until one or more objects are selected.
    viewOrModifyObjectAttributesMenuItem.setEnabled(false);
    copyObjectsMenuItem.setEnabled(false);
    refreshObjectMenuItem.setEnabled(false);
    updateObjectACLMenuItem.setEnabled(false);
    downloadObjectMenuItem.setEnabled(false);
    uploadFilesMenuItem.setEnabled(false);
    generatePublicGetUrls.setEnabled(false);
    generateTorrentUrl.setEnabled(false);
    deleteObjectMenuItem.setEnabled(false);

    // Tools menu.
    JMenu toolsMenu = new JMenu("Tools");
    bucketLoggingMenuItem = createMenuItem("Configure Bucket logging...", "BucketLogging",
        "/images/nuvola/16x16/actions/toggle_log.png");
    bucketLoggingMenuItem.setEnabled(false);
    toolsMenu.add(bucketLoggingMenuItem);
    manageDistributionsMenuItem = createMenuItem("Manage CloudFront Distributions...", "ManageDistributions",
        "/images/nuvola/16x16/actions/irkick.png");
    manageDistributionsMenuItem.setEnabled(false);
    toolsMenu.add(manageDistributionsMenuItem);
    toolsMenu.add(new JSeparator());
    preferencesDialogMenuItem = createMenuItem("Preferences...", "PreferencesDialog",
        "/images/nuvola/16x16/actions/configure.png");
    toolsMenu.add(preferencesDialogMenuItem);

    // Help menu. These items open web pages directly rather than dispatching
    // action commands, so they keep their own inline listeners.
    JMenu helpMenu = new JMenu("Help");
    cockpitHelpMenuItem = new JMenuItem("Cockpit Guide");
    guiUtils.applyIcon(cockpitHelpMenuItem, "/images/nuvola/16x16/actions/help.png");
    cockpitHelpMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            try {
                followHyperlink(new URL(JETS3T_COCKPIT_HELP_PAGE), "_blank");
            } catch (MalformedURLException ex) {
                // The URL is a compile-time constant; a parse failure is a programming error.
                throw new IllegalStateException("Invalid URL embedded in program: "
                    + JETS3T_COCKPIT_HELP_PAGE);
            }
        }
    });
    helpMenu.add(cockpitHelpMenuItem);
    amazonS3HelpMenuItem = new JMenuItem("Amazon S3");
    guiUtils.applyIcon(amazonS3HelpMenuItem, "/images/nuvola/16x16/actions/gohome.png");
    amazonS3HelpMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            try {
                followHyperlink(new URL(AMAZON_S3_PAGE), "_blank");
            } catch (MalformedURLException ex) {
                throw new IllegalStateException("Invalid URL embedded in program: "
                    + AMAZON_S3_PAGE);
            }
        }
    });
    helpMenu.add(amazonS3HelpMenuItem);

    // Build application menu bar.
    appMenuBar.add(serviceMenu);
    appMenuBar.add(toolsMenu);
    appMenuBar.add(helpMenu);
}

/**
 * Builds a menu item whose ActionEvents are dispatched to this application's
 * {@link #actionPerformed} handler.
 *
 * @param text the menu item's label
 * @param actionCommand the action command string switched on in actionPerformed
 * @param iconResourcePath classpath location of the item's 16x16 icon
 * @return the configured menu item
 */
private JMenuItem createMenuItem(String text, String actionCommand, String iconResourcePath) {
    JMenuItem item = new JMenuItem(text);
    item.setActionCommand(actionCommand);
    item.addActionListener(this);
    guiUtils.applyIcon(item, iconResourcePath);
    return item;
}
/**
 * Initialise the application's File drop targets for drag and drop copying of local files
 * to S3. A single shared listener is attached to each component; dropped file
 * lists are handed to {@link #uploadFiles}.
 *
 * @param dropTargetComponents
 * the components files can be dropped on to transfer them to S3
 */
private void initDropTarget(JComponent[] dropTargetComponents) {
    DropTargetListener dropTargetListener = new DropTargetListener() {
        // Accepts the drag only when the payload is a file list and the
        // requested action is COPY or MOVE; rejects everything else.
        private boolean checkValidDrag(DropTargetDragEvent dtde) {
            if (dtde.isDataFlavorSupported(DataFlavor.javaFileListFlavor)
                && (DnDConstants.ACTION_COPY == dtde.getDropAction()
                    || DnDConstants.ACTION_MOVE == dtde.getDropAction()))
            {
                dtde.acceptDrag(dtde.getDropAction());
                return true;
            } else {
                dtde.rejectDrag();
                return false;
            }
        }
        public void dragEnter(DropTargetDragEvent dtde) {
            if (checkValidDrag(dtde)) {
                // Move focus to the objects table so the user sees where the drop will land.
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        objectsTable.requestFocusInWindow();
                    };
                });
            }
        }
        public void dragOver(DropTargetDragEvent dtde) {
            checkValidDrag(dtde);
        }
        public void dropActionChanged(DropTargetDragEvent dtde) {
            // Re-evaluate validity when the user changes the drag action mid-drag,
            // shifting focus to reflect whether a drop is currently possible.
            if (checkValidDrag(dtde)) {
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        objectsTable.requestFocusInWindow();
                    };
                });
            } else {
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        ownerFrame.requestFocusInWindow();
                    };
                });
            }
        }
        public void dragExit(DropTargetEvent dte) {
            // Return focus to the main frame when the drag leaves the target.
            SwingUtilities.invokeLater(new Runnable() {
                public void run() {
                    ownerFrame.requestFocusInWindow();
                };
            });
        }
        public void drop(DropTargetDropEvent dtde) {
            if (dtde.isDataFlavorSupported(DataFlavor.javaFileListFlavor)
                && (DnDConstants.ACTION_COPY == dtde.getDropAction()
                    || DnDConstants.ACTION_MOVE == dtde.getDropAction()))
            {
                dtde.acceptDrop(dtde.getDropAction());
                try {
                    // Extract the dropped files and kick off an upload.
                    final List fileList = (List) dtde.getTransferable().getTransferData(
                        DataFlavor.javaFileListFlavor);
                    if (fileList != null && fileList.size() > 0) {
                        uploadFiles((File[]) fileList.toArray(new File[fileList.size()]));
                    }
                } catch (Exception e) {
                    String message = "Unable to start accept dropped items";
                    log.error(message, e);
                    ErrorDialog.showDialog(ownerFrame, null, message, e);
                }
            } else {
                dtde.rejectDrop();
            }
        }
    };

    // Attach drop target listener to each target component.
    for (int i = 0; i < dropTargetComponents.length; i++) {
        new DropTarget(dropTargetComponents[i], DnDConstants.ACTION_COPY, dropTargetListener, true);
    }
}
/**
 * Runs the given task on a freshly started background thread. Returns as
 * soon as the thread has been started; it does not wait for the task to
 * complete.
 *
 * @param runnable the work to perform off the event dispatch thread
 */
private synchronized void runInBackgroundThread(Runnable runnable) {
    new Thread(runnable).start();
}
/**
 * Executes the given runnable on the AWT event dispatch thread and blocks
 * until it has finished.
 *
 * @param runnable the GUI work to perform
 * @return true if the runnable completed normally, false if any error
 * occurred (the error is logged, not rethrown)
 */
private synchronized boolean runInDispatcherThreadImmediately(Runnable runnable) {
    boolean completed = false;
    try {
        SwingUtilities.invokeAndWait(runnable);
        completed = true;
    } catch (Exception e) {
        log.error("Error displaying graphical elements", e);
    }
    return completed;
}
/**
 * Starts a progress display dialog that cannot be cancelled. While the dialog is running the user
 * cannot interact with the application.
 * <p>
 * Convenience overload: delegates to the full version with no details text,
 * an indeterminate (0..0) progress range, and no cancel support.
 *
 * @param statusText
 * describes the status of a task in text meaningful to the user
 */
private void startProgressDialog(String statusText) {
    startProgressDialog(statusText, null, 0, 0, null, null);
}
/**
 * Starts a progress display dialog. While the dialog is running the user cannot interact
 * with the application, except to cancel the task.
 *
 * @param statusMessage
 * describes the status of a task in text meaningful to the user, such as "3 files of 7 uploaded"
 * @param detailsText
 * describes the status of a task in more detail, such as the current transfer rate and time remaining.
 * @param minTaskValue the minimum progress value for a task, generally 0
 * @param maxTaskValue
 * the maximum progress value for a task, such as the total number of threads or 100 if
 * using percentage-complete as a metric.
 * @param cancelButtonText
 * text displayed in the cancel button if a task can be cancelled. This is only used if
 * a cancel event listener is provided.
 * @param cancelEventListener
 * listener that is responsible for cancelling a long-lived task when the user clicks
 * the cancel button. If a task cannot be cancelled this must be null.
 */
private void startProgressDialog(final String statusMessage, final String detailsText,
    final int minTaskValue, final int maxTaskValue, final String cancelButtonText,
    final CancelEventTrigger cancelEventListener)
{
    // Lazily create the single shared dialog instance on first use.
    if (this.progressDialog == null) {
        this.progressDialog = new ProgressDialog(this.ownerFrame, "Please wait...", null);
    }
    Runnable showDialogTask = new Runnable() {
        public void run() {
            progressDialog.startDialog(statusMessage, detailsText, minTaskValue, maxTaskValue,
                cancelEventListener, cancelButtonText);
        }
    };
    // Showing the dialog must happen on the event dispatch thread.
    SwingUtilities.invokeLater(showDialogTask);
}
/**
 * Updates the status text and value of the progress display dialog.
 *
 * @param statusMessage
 * describes the status of a task in text meaningful to the user, such as "3 files of 7 uploaded"
 * @param detailsText
 * describes the status of a task in more detail, such as the current transfer rate and time remaining.
 * @param progressValue
 * value representing how far through the task we are (relative to min and max values)
 */
private void updateProgressDialog(final String statusMessage, final String detailsText, final int progressValue) {
    Runnable updateTask = new Runnable() {
        public void run() {
            progressDialog.updateDialog(statusMessage, detailsText, progressValue);
        }
    };
    // Dialog updates must be performed on the event dispatch thread.
    SwingUtilities.invokeLater(updateTask);
}
/**
 * Stops/halts the progress display dialog and allows the user to interact
 * with the application again. Blocks until the dialog has been dismissed.
 */
private void stopProgressDialog() {
    Runnable stopTask = new Runnable() {
        public void run() {
            progressDialog.stopDialog();
        }
    };
    runInDispatcherThreadImmediately(stopTask);
}
/**
 * Event handler for this application; dispatches on the action command of
 * every menu item and the objects-filter checkbox. The two long-running
 * interactive branches (file upload chooser, preferences handling) are
 * delegated to private helpers.
 */
public void actionPerformed(ActionEvent event) {
    // Service Menu Events
    if ("LoginEvent".equals(event.getActionCommand())) {
        loginEvent();
    } else if ("LogoutEvent".equals(event.getActionCommand())) {
        logoutEvent();
    } else if ("QuitEvent".equals(event.getActionCommand())) {
        System.exit(0);
    }
    // Bucket Events.
    else if ("ViewBucketProperties".equals(event.getActionCommand())) {
        listBucketProperties();
    } else if ("RefreshBuckets".equals(event.getActionCommand())) {
        listAllBuckets();
    } else if ("CreateBucket".equals(event.getActionCommand())) {
        createBucketAction();
    } else if ("DeleteBucket".equals(event.getActionCommand())) {
        deleteSelectedBucket();
    } else if ("ManageDistributions".equals(event.getActionCommand())) {
        // The distributions dialog needs the names of all listed buckets.
        S3Bucket[] buckets = bucketTableModel.getBuckets();
        String[] bucketNames = new String[buckets.length];
        for (int i = 0; i < buckets.length; i++) {
            bucketNames[i] = buckets[i].getName();
        }
        ManageDistributionsDialog.showDialog(ownerFrame, cloudFrontService, bucketNames, this);
    } else if ("AddThirdPartyBucket".equals(event.getActionCommand())) {
        addThirdPartyBucket();
    } else if ("UpdateBucketACL".equals(event.getActionCommand())) {
        updateBucketAccessControlList();
    } else if ("UpdateBucketRequesterPaysStatus".equals(event.getActionCommand())) {
        updateBucketRequesterPaysSetting();
    }
    // Object Events
    else if ("ViewOrModifyObjectAttributes".equals(event.getActionCommand())) {
        displayObjectsAttributesDialog();
    } else if ("CopyObjects".equals(event.getActionCommand())) {
        copyObjects();
    } else if ("RefreshObjects".equals(event.getActionCommand())) {
        listObjects();
    } else if ("UpdateObjectACL".equals(event.getActionCommand())) {
        displayAclModificationDialog();
    } else if ("GeneratePublicGetURLs".equals(event.getActionCommand())) {
        generatePublicGetUrls();
    } else if ("GenerateTorrentURL".equals(event.getActionCommand())) {
        generateTorrentUrl();
    } else if ("DeleteObjects".equals(event.getActionCommand())) {
        deleteSelectedObjects();
    } else if ("DownloadObjects".equals(event.getActionCommand())) {
        downloadSelectedObjects();
    } else if ("UploadFiles".equals(event.getActionCommand())) {
        chooseFilesToUpload();
    } else if (event.getSource().equals(filterObjectsCheckBox)) {
        // Toggle the object-filter panel; clearing the filter when hidden.
        if (filterObjectsCheckBox.isSelected()) {
            filterObjectsPanel.setVisible(true);
        } else {
            filterObjectsPanel.setVisible(false);
            filterObjectsPrefix.setText("");
            if (filterObjectsDelimiter.getSelectedIndex() != 0) {
                filterObjectsDelimiter.setSelectedIndex(0);
            }
        }
    }
    // Tools events
    else if ("BucketLogging".equals(event.getActionCommand())) {
        S3Bucket[] buckets = bucketTableModel.getBuckets();
        BucketLoggingDialog.showDialog(ownerFrame, s3ServiceMulti.getS3Service(), buckets, this);
    }
    // Preference Events
    else if ("PreferencesDialog".equals(event.getActionCommand())) {
        PreferencesDialog.showDialog(cockpitPreferences, ownerFrame, this);
        applyCockpitPreferences();
    }
    // Ooops...
    else {
        log.warn("Unrecognised ActionEvent command '" + event.getActionCommand() + "' in " + event);
    }
}

/**
 * Prompts the user to choose files and/or directories, then uploads the
 * selection via {@link #uploadFiles}. Remembers the chosen directory so the
 * next chooser opens in the same place. Does nothing if the user cancels or
 * selects nothing.
 */
private void chooseFilesToUpload() {
    JFileChooser fileChooser = new JFileChooser();
    fileChooser.setMultiSelectionEnabled(true);
    fileChooser.setDialogTitle("Choose files to upload");
    fileChooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
    fileChooser.setApproveButtonText("Upload files");
    fileChooser.setCurrentDirectory(fileChoosersLastUploadDirectory);

    int returnVal = fileChooser.showOpenDialog(ownerFrame);
    if (returnVal != JFileChooser.APPROVE_OPTION) {
        return;
    }
    final File[] uploadFiles = fileChooser.getSelectedFiles();
    if (uploadFiles.length == 0) {
        return;
    }

    // Save the chosen directory location for next time.
    fileChoosersLastUploadDirectory = uploadFiles[0].getParentFile();
    uploadFiles(uploadFiles);
}

/**
 * Applies the user's cockpit preferences after the preferences dialog has
 * closed: persists or deletes the preferences properties file as requested,
 * and (re)builds the encryption utility from the password settings, or
 * clears it when no encryption password is set.
 */
private void applyCockpitPreferences() {
    // Save a user's preferences if requested, otherwise wipe any existing preferences file.
    File cockpitPreferencesPropertiesFile = new File(cockpitHomeDirectory, Constants.COCKPIT_PROPERTIES_FILENAME);
    if (cockpitPreferences.isRememberPreferences()) {
        try {
            Properties properties = cockpitPreferences.toProperties();
            if (!cockpitHomeDirectory.exists()) {
                cockpitHomeDirectory.mkdir();
            }
            properties.list(new PrintStream(
                new FileOutputStream(cockpitPreferencesPropertiesFile)));
        } catch (IOException e) {
            String message = "Unable to save your preferences";
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    } else if (cockpitPreferencesPropertiesFile.exists()) {
        // User elected not to store preferences, delete the existing preferences file.
        cockpitPreferencesPropertiesFile.delete();
    }

    if (cockpitPreferences.isEncryptionPasswordSet()) {
        try {
            encryptionUtil = new EncryptionUtil(
                cockpitPreferences.getEncryptionPassword(),
                cockpitPreferences.getEncryptionAlgorithm(),
                EncryptionUtil.DEFAULT_VERSION);
        } catch (Exception e) {
            String message = "Unable to start encryption utility";
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    } else {
        encryptionUtil = null;
    }
}
/**
 * Handles list selection events for this application, routing bucket-table
 * selections and object-table selections to their respective handlers.
 */
public void valueChanged(ListSelectionEvent e) {
    // Ignore intermediate events fired while the selection is still changing.
    if (e.getValueIsAdjusting()) {
        return;
    }
    Object source = e.getSource();
    if (source.equals(bucketsTable.getSelectionModel())) {
        bucketSelectedAction();
    } else if (source.equals(objectsTable.getSelectionModel())) {
        objectSelectedAction();
    }
}
/**
 * Displays the {@link StartupDialog} dialog and, if the user provides login credentials,
 * logs into the S3 service using those credentials. A CloudFront service is
 * also created for the account, but discarded if the account is not
 * subscribed to CloudFront ("OptInRequired").
 * <p>
 * On any failure the application is returned to the logged-out state via
 * {@link #logoutEvent}.
 *
 * This method should always be run within the event dispatcher thread.
 */
private void loginEvent() {
    try {
        StartupDialog startupDialog = new StartupDialog(ownerFrame, cockpitProperties, this);
        startupDialog.setVisible(true);
        AWSCredentials awsCredentials = startupDialog.getAWSCredentials();
        startupDialog.dispose();

        if (awsCredentials == null) {
            // Log in cancelled by user.
            // (A second null check later in the original was unreachable; removed.)
            return;
        }

        s3ServiceMulti = new S3ServiceMulti(
            new RestS3Service(awsCredentials, APPLICATION_DESCRIPTION, this), this);
        cloudFrontService = new CloudFrontService(
            awsCredentials, APPLICATION_DESCRIPTION, this, null, null);
        try {
            // Check that the user is actually signed-up for CloudFront.
            cloudFrontService.listDistributions();
        } catch (CloudFrontServiceException e) {
            if ("OptInRequired".equals(e.getErrorCode())) {
                log.warn("Your AWS account is not subscribed to the Amazon CloudFront service, "
                    + "you will not be able to manage distributions");
            }
            cloudFrontService = null;
        }

        listAllBuckets();
        objectsSummaryLabel.setText(" ");

        // Switch the menus into the logged-in state.
        loginMenuItem.setEnabled(false);
        logoutMenuItem.setEnabled(true);
        refreshBucketMenuItem.setEnabled(true);
        createBucketMenuItem.setEnabled(true);
        bucketLoggingMenuItem.setEnabled(true);
        manageDistributionsMenuItem.setEnabled(cloudFrontService != null);
    } catch (Exception e) {
        String message = "Unable to log in to S3";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
        // Roll back to the anonymous, logged-out state.
        logoutEvent();
    }
}
/**
 * Logs out of the S3 service by clearing all listed objects and buckets and resetting
 * the s3ServiceMulti member variable. The service is replaced with an
 * anonymous (credential-less) one rather than nulled, and the menus are
 * returned to the logged-out state.
 *
 * This method should always be invoked within the event dispatching thread.
 */
private void logoutEvent() {
    log.debug("Logging out");
    try {
        // Revert to anonymous service.
        s3ServiceMulti = new S3ServiceMulti(
            new RestS3Service(null, APPLICATION_DESCRIPTION, this), this);
        cloudFrontService = null;
        // Clear all bucket and object state from the tables.
        bucketsTable.clearSelection();
        bucketTableModel.removeAllBuckets();
        objectTableModel.removeAllObjects();
        objectsSummaryLabel.setText(" ");
        // Drop the account owner's name from the window title.
        ownerFrame.setTitle(APPLICATION_TITLE);
        // Restore the logged-out menu state.
        loginMenuItem.setEnabled(true);
        logoutMenuItem.setEnabled(false);
        refreshBucketMenuItem.setEnabled(false);
        createBucketMenuItem.setEnabled(false);
        bucketLoggingMenuItem.setEnabled(false);
        manageDistributionsMenuItem.setEnabled(false);
    } catch (Exception e) {
        String message = "Unable to log out from S3";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
 * Displays the currently selected bucket's properties in the dialog {@link ItemPropertiesDialog}.
 * If the bucket's ACL or location are not yet known they are fetched in a
 * background thread (with a progress dialog) before the dialog is shown.
 */
private void listBucketProperties() {
    final S3Bucket selectedBucket = getCurrentSelectedBucket();
    if (selectedBucket.getAcl() == null || !selectedBucket.isLocationKnown()) {
        // Retrieve all a bucket's details before displaying the summary.
        runInBackgroundThread(new Runnable() {
            public void run() {
                startProgressDialog("Retrieving details for bucket " + selectedBucket.getName());
                try {
                    try {
                        // Fill in only the details that are missing.
                        if (selectedBucket.getAcl() == null) {
                            selectedBucket.setAcl(
                                s3ServiceMulti.getS3Service().getBucketAcl(
                                    selectedBucket));
                        }
                        if (!selectedBucket.isLocationKnown()) {
                            selectedBucket.setLocation(
                                s3ServiceMulti.getS3Service().getBucketLocation(
                                    selectedBucket.getName()));
                        }
                        if (!selectedBucket.isRequesterPaysKnown()) {
                            selectedBucket.setRequesterPays(
                                s3ServiceMulti.getS3Service().isRequesterPaysBucket(
                                    selectedBucket.getName()));
                        }
                    } catch (S3ServiceException e) {
                        // Retrieving details for a third-party bucket will
                        // often fail when ACL or Location is retrieved,
                        // ignore these failures.
                    }
                    stopProgressDialog();
                    // Show the dialog on the event dispatch thread.
                    runInDispatcherThreadImmediately(new Runnable() {
                        public void run() {
                            ItemPropertiesDialog.showDialog(ownerFrame, selectedBucket, null);
                        }
                    });
                } catch (final Exception e) {
                    stopProgressDialog();
                    String message = "Unable to retrieve details for bucket";
                    log.error(message, e);
                    ErrorDialog.showDialog(ownerFrame, null, message, e);
                }
            };
        });
    } else {
        // All details already known; show the dialog immediately.
        ItemPropertiesDialog.showDialog(ownerFrame, selectedBucket, null);
    }
}
/**
 * Displays the currently selected object's properties in the dialog {@link ObjectsAttributesDialog}.
 * <p>
 * As detailed information about the object may not yet be available, this method works
 * indirectly via the {@link #retrieveObjectsDetails} method. The <code>retrieveObjectsDetails</code>
 * method retrieves all the details for the currently selected objects, and once they are available
 * knows to display the dialog as the {@link #isViewingOrModifyingObjectProperties} flag is set.
 * <p>
 * If the user approves modifications in the dialog, the objects are copied
 * in-place (REPLACE metadata) with their original ACLs retained, then their
 * details are refreshed.
 */
private void displayObjectsAttributesDialog() {
    runInBackgroundThread(new Runnable() {
        public void run() {
            // Ensure full object details are available before showing the dialog.
            if (!retrieveObjectsDetails(getSelectedObjects())) {
                return;
            }
            // Lazily create the shared dialog instance.
            if (objectsAttributesDialog == null) {
                objectsAttributesDialog = new ObjectsAttributesDialog(
                    ownerFrame, "Object Attributes", skinsFactory);
            }
            final S3Object[] sourceObjects = getSelectedObjects();
            // Display the (modal) dialog on the event dispatch thread and wait.
            boolean ok = runInDispatcherThreadImmediately(new Runnable() {
                public void run() {
                    objectsAttributesDialog.displayDialog(sourceObjects, true);
                }
            });
            if (!ok) {
                return;
            }
            final String[] sourceObjectKeys = objectsAttributesDialog.getSourceObjectKeys();
            final S3Object[] destinationObjects = objectsAttributesDialog.getDestinationObjects();
            if (!objectsAttributesDialog.isModifyActionApproved()) {
                // Do nothing.
                return;
            }
            // Retain ACL settings from original objects.
            if (!s3ServiceMulti.getObjectACLs(getCurrentSelectedBucket(), sourceObjects)) {
                return;
            }
            for (int i = 0; i < sourceObjects.length; i++) {
                destinationObjects[i].setAcl(
                    sourceObjects[i].getAcl());
            }
            // Copy objects in-place, to REPLACE their metadata attributes.
            ok = s3ServiceMulti.copyObjects(
                getCurrentSelectedBucket().getName(), getCurrentSelectedBucket().getName(),
                sourceObjectKeys, destinationObjects, true);
            // Refresh details for modified objects
            if (ok) {
                s3ServiceMulti.getObjectsHeads(
                    getCurrentSelectedBucket(), destinationObjects);
            }
        }
    });
}
/**
 * Lists the buckets in the user's S3 account and refreshes the GUI to display
 * these buckets. Any buckets or objects already listed in the GUI are cleared first.
 * When a CloudFront service is available, each bucket is also flagged if it
 * backs one or more CloudFront distributions. If listing fails, the user is
 * logged out and prompted to log in again.
 */
private void listAllBuckets() {
    // Remove current bucket and object data from models.
    cachedBuckets.clear();
    bucketsTable.clearSelection();
    bucketTableModel.removeAllBuckets();
    objectTableModel.removeAllObjects();

    // This is all very convoluted. This was necessary so we can display the status dialog box.
    runInBackgroundThread(new Runnable() {
        public void run() {
            startProgressDialog("Listing buckets for " + s3ServiceMulti.getAWSCredentials().getAccessKey());
            try {
                final S3Bucket[] buckets = s3ServiceMulti.getS3Service().listAllBuckets();

                // Lookup user's CloudFront distributions.
                Distribution[] distributions = new Distribution[] {};
                if (cloudFrontService != null) {
                    updateProgressDialog("Listing distributions for " + cloudFrontService.getAWSCredentials().getAccessKey(), "", 0);
                    distributions = cloudFrontService.listDistributions();
                }
                final Distribution[] finalDistributions = distributions;

                // Populate the bucket table on the event dispatch thread.
                runInDispatcherThreadImmediately(new Runnable() {
                    public void run() {
                        for (int i = 0; i < buckets.length; i++) {
                            // Determine whether each bucket has one or more CloudFront distributions.
                            boolean bucketHasDistribution = false;
                            for (int j = 0; j < finalDistributions.length; j++) {
                                if (finalDistributions[j].getOrigin().equals(buckets[i].getName() + ".s3.amazonaws.com")) {
                                    bucketHasDistribution = true;
                                }
                            }

                            bucketTableModel.addBucket(buckets[i], bucketHasDistribution);

                            if (i == 0) {
                                // Show the account owner's display name in the window title.
                                ownerFrame.setTitle(APPLICATION_TITLE + " : " +
                                    buckets[i].getOwner().getDisplayName());
                            }
                        }
                    }
                });
            } catch (final Exception e) {
                stopProgressDialog();

                // On failure: log out, report the error, and re-prompt for login.
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        logoutEvent();

                        String message = "Unable to list your buckets in S3, please log in again";
                        log.error(message, e);
                        ErrorDialog.showDialog(ownerFrame, null, message, e);

                        loginEvent();
                    }
                });
            } finally {
                stopProgressDialog();
            }
        };
    });
}
/**
 * This method is an {@link S3ServiceEventListener} action method that is invoked when this
 * application's <code>S3ServiceMulti</code> triggers a <code>GetObjectsEvent</code>.
 * <p>
 * This never happens in this application as downloads are performed by
 * {@link S3ServiceMulti#downloadObjects(S3Bucket, DownloadPackage[])} instead.
 *
 * @param event the ignored event
 */
public void s3ServiceEventPerformed(GetObjectsEvent event) {
    // Not used.
}
/**
 * This method is an {@link S3ServiceEventListener} action method that is invoked when this
 * application's <code>S3ServiceMulti</code> triggers a <code>ListObjectsEvent</code>.
 * <p>
 * This never happens in this application as it does not perform multi-threaded object
 * listings.
 *
 * @param event the ignored event
 */
public void s3ServiceEventPerformed(ListObjectsEvent event) {
    // Not used.
}
/**
 * Actions performed when a bucket is selected in the bucket list table.
 * Enables or disables the bucket- and object-related menu items and drop
 * targets depending on whether a bucket is selected, then shows the
 * bucket's objects (from cache when available, otherwise by listing).
 */
private void bucketSelectedAction() {
    S3Bucket newlySelectedBucket = getCurrentSelectedBucket();
    boolean bucketSelected = (newlySelectedBucket != null);

    // Refreshing the bucket list is always available here.
    refreshBucketMenuItem.setEnabled(true);

    // These actions require a selected bucket.
    viewBucketPropertiesMenuItem.setEnabled(bucketSelected);
    updateBucketACLMenuItem.setEnabled(bucketSelected);
    updateBucketRequesterPaysStatusMenuItem.setEnabled(bucketSelected);
    deleteBucketMenuItem.setEnabled(bucketSelected);
    refreshObjectMenuItem.setEnabled(bucketSelected);
    uploadFilesMenuItem.setEnabled(bucketSelected);

    // Drag-and-drop uploads only make sense with a target bucket.
    objectsTable.getDropTarget().setActive(bucketSelected);
    objectsTableSP.getDropTarget().setActive(bucketSelected);

    if (!bucketSelected) {
        objectTableModel.removeAllObjects();
        return;
    }

    if (cachedBuckets.containsKey(newlySelectedBucket.getName())) {
        // Reuse the cached object listing instead of querying S3 again.
        S3Object[] cachedObjects = (S3Object[]) cachedBuckets.get(newlySelectedBucket.getName());
        objectTableModel.removeAllObjects();
        objectTableModel.addObjects(cachedObjects);
        updateObjectsSummary(false);
    } else {
        listObjects();
    }
}
/**
 * Actions performed when an object is selected in the objects list table.
 * Most object actions need at least one selection; Torrent URL generation
 * requires exactly one selected object.
 */
private void objectSelectedAction() {
    int selectedCount = getSelectedObjects().length;
    boolean anySelected = (selectedCount > 0);

    updateObjectACLMenuItem.setEnabled(anySelected);
    downloadObjectMenuItem.setEnabled(anySelected);
    deleteObjectMenuItem.setEnabled(anySelected);
    viewOrModifyObjectAttributesMenuItem.setEnabled(anySelected);
    copyObjectsMenuItem.setEnabled(anySelected);
    generatePublicGetUrls.setEnabled(anySelected);
    // A Torrent URL can only be generated for a single object.
    generateTorrentUrl.setEnabled(selectedCount == 1);
}
/**
 * Starts a thread to run {@link S3ServiceMulti#listObjects}. Objects are
 * listed in chunks of BUCKET_LIST_CHUNKING_SIZE, the table updated after
 * each chunk, and the final listing is cached per bucket. The user may
 * cancel the listing between chunks via the progress dialog.
 */
private void listObjects() {
    if (getCurrentSelectedBucket() == null) {
        // Oops, better do nothing.
        return;
    }
    // Single-element array lets the anonymous cancel listener flag cancellation.
    final boolean listingCancelled[] = new boolean[1]; // Default to false.
    final CancelEventTrigger cancelListener = new CancelEventTrigger() {
        private static final long serialVersionUID = 6939193243303189876L;
        public void cancelTask(Object eventSource) {
            listingCancelled[0] = true;
        }
    };

    // This is all very convoluted, it was done this way to ensure we can display the dialog box.
    runInBackgroundThread(new Runnable() {
        public void run() {
            try {
                objectTableModel.removeAllObjects();
                objectsSummaryLabel.setText(" ");

                startProgressDialog(
                    "Listing objects in " + getCurrentSelectedBucket().getName(),
                    "", 0, 0, "Cancel bucket listing", cancelListener);

                // Apply the user's current prefix/delimiter filter settings.
                final String prefix = filterObjectsPrefix.getText();
                final String delimiter = (String) filterObjectsDelimiter.getSelectedItem();

                final ArrayList allObjects = new ArrayList();
                String priorLastKey = null;
                do {
                    // Fetch the next chunk, resuming from the prior chunk's last key.
                    S3ObjectsChunk chunk = s3ServiceMulti.getS3Service().listObjectsChunked(
                        getCurrentSelectedBucket().getName(), prefix, delimiter,
                        BUCKET_LIST_CHUNKING_SIZE, priorLastKey);

                    final S3Object[] objects = chunk.getObjects();
                    for (int i = 0; i < objects.length; i++) {
                        objects[i].setOwner(getCurrentSelectedBucket().getOwner());
                    }

                    priorLastKey = chunk.getPriorLastKey();
                    allObjects.addAll(Arrays.asList(objects));

                    updateProgressDialog(
                        "Listed " + allObjects.size() + " objects in "
                        + getCurrentSelectedBucket().getName(), "", 0);

                    // Append this chunk to the table on the event dispatch thread.
                    runInDispatcherThreadImmediately(new Runnable() {
                        public void run() {
                            objectTableModel.addObjects(objects);
                            updateObjectsSummary(true);
                        }
                    });
                } while (!listingCancelled[0] && priorLastKey != null);

                // Finalise the summary and cache the complete (or partial) listing.
                runInDispatcherThreadImmediately(new Runnable() {
                    public void run() {
                        updateObjectsSummary(listingCancelled[0]);
                        S3Object[] allObjects = objectTableModel.getObjects();
                        cachedBuckets.put(getCurrentSelectedBucket().getName(), allObjects);
                    }
                });
            } catch (final Exception e) {
                stopProgressDialog();

                String message = "Unable to list objects";
                log.error(message, e);
                ErrorDialog.showDialog(ownerFrame, null, message, e);
            } finally {
                stopProgressDialog();
            }
        };
    });
}
/**
* Updates the summary text shown below the listing of objects, which details the
* number and total size of the objects.
*
*/
private void updateObjectsSummary(boolean isIncompleteListing) {
    S3Object[] objects = objectTableModel.getObjects();
    try {
        // Default text shown when there is no object listing at all.
        String summary = "Please select a bucket";
        if (objects != null) {
            long totalBytes = 0;
            for (int i = 0; i < objects.length; i++) {
                totalBytes += objects[i].getContentLength();
            }
            // Assemble the HTML summary: item count, total size, timestamp,
            // and optional "Filtered"/"Incomplete" markers.
            StringBuffer text = new StringBuffer("<html>");
            text.append(objects.length).append(" item")
                .append(objects.length != 1 ? "s" : "");
            if (totalBytes > 0) {
                text.append(", ").append(byteFormatter.formatByteSize(totalBytes));
            }
            text.append(" @ ").append(timeSDF.format(new Date()));
            if (isObjectFilteringActive()) {
                text.append(" - <font color=\"blue\">Filtered</font>");
            }
            if (isIncompleteListing) {
                text.append(" - <font color=\"red\">Incomplete</font>");
            }
            text.append("</html>");
            summary = text.toString();
        }
        objectsSummaryLabel.setText(summary);
    } catch (Throwable t) {
        String message = "Unable to update object list summary";
        log.error(message, t);
        ErrorDialog.showDialog(ownerFrame, this, message, t);
    }
}
/**
* Displays bucket-specific actions in a popup menu.
* @param invoker the component near which the popup menu will be displayed
* @param xPos the mouse's horizontal co-ordinate when the popup menu was invoked
* @param yPos the mouse's vertical co-ordinate when the popup menu was invoked
*/
private void showBucketPopupMenu(JComponent invoker, int xPos, int yPos) {
    // Without a service connection there are no bucket actions to offer.
    if (s3ServiceMulti != null) {
        bucketActionMenu.show(invoker, xPos, yPos);
    }
}
/**
* @return the bucket currently selected in the gui, null if no bucket is selected.
*/
private S3Bucket getCurrentSelectedBucket() {
    int[] selectedRows = bucketsTable.getSelectedRows();
    if (selectedRows.length == 0) {
        return null;
    }
    // Translate the first selected view row through the sorter to find
    // the underlying model index before looking up the bucket.
    int modelIndex = bucketTableModelSorter.modelIndex(selectedRows[0]);
    return bucketTableModel.getBucket(modelIndex);
}
/**
* Displays object-specific actions in a popup menu.
* @param invoker the component near which the popup menu will be displayed
* @param xPos the mouse's horizontal co-ordinate when the popup menu was invoked
* @param yPos the mouse's vertical co-ordinate when the popup menu was invoked
*/
private void showObjectPopupMenu(JComponent invoker, int xPos, int yPos) {
    // Object actions only make sense with a bucket and at least one object selected.
    boolean haveSelection =
        getCurrentSelectedBucket() != null && getSelectedObjects().length > 0;
    if (haveSelection) {
        objectActionMenu.show(invoker, xPos, yPos);
    }
}
/**
* Action to create a new bucket in S3 after prompting the user for a bucket name.
*
*/
private void createBucketAction() {
    // Suggest a name derived from the user's access key, since S3 bucket
    // names share a global namespace.
    String suggestedName =
        s3ServiceMulti.getAWSCredentials().getAccessKey().toLowerCase()
        + "." + "bucket-name";
    CreateBucketDialog createDialog = new CreateBucketDialog(suggestedName, ownerFrame, this);
    createDialog.setVisible(true);
    if (!createDialog.getOkClicked()) {
        return;
    }
    final S3Bucket bucketToCreate =
        new S3Bucket(createDialog.getBucketName(), createDialog.getBucketLocation());
    createDialog.dispose();
    runInBackgroundThread(new Runnable() {
        public void run() {
            // On success, select the newly created bucket in the buckets table.
            if (s3ServiceMulti.createBuckets(new S3Bucket[] { bucketToCreate })) {
                int modelIndex = bucketTableModel.getBucketIndexByName(bucketToCreate.getName());
                int viewIndex = bucketTableModelSorter.viewIndex(modelIndex);
                bucketsTable.setRowSelectionInterval(viewIndex, viewIndex);
            }
        }
    });
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>CreateBucketsEvent</code>.
* <p>
* When a bucket is successfully created it is added to the listing of buckets.
*
* @param event
*/
public void s3ServiceEventPerformed(final CreateBucketsEvent event) {
    int eventCode = event.getEventCode();
    if (eventCode == ServiceEvent.EVENT_STARTED) {
        startProgressDialog(
            "Creating " + event.getThreadWatcher().getThreadCount() + " buckets",
            "", 0, (int) event.getThreadWatcher().getThreadCount(),
            "Cancel bucket creation", event.getThreadWatcher().getCancelEventListener());
    } else if (eventCode == ServiceEvent.EVENT_IN_PROGRESS) {
        // Add each newly created bucket to the table on the event dispatch thread.
        runInDispatcherThreadImmediately(new Runnable() {
            public void run() {
                for (int i = 0; i < event.getCreatedBuckets().length; i++) {
                    bucketTableModel.addBucket(event.getCreatedBuckets()[i], false);
                }
            }
        });
        ThreadWatcher watcher = event.getThreadWatcher();
        String statusText = "Created " + watcher.getCompletedThreads()
            + " buckets of " + watcher.getThreadCount();
        updateProgressDialog(statusText, "", (int) watcher.getCompletedThreads());
    } else if (eventCode == ServiceEvent.EVENT_COMPLETED
               || eventCode == ServiceEvent.EVENT_CANCELLED) {
        stopProgressDialog();
    } else if (eventCode == ServiceEvent.EVENT_ERROR) {
        stopProgressDialog();
        String message = "Unable to create a bucket";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
* Deletes the bucket currently selected in the gui.
*
*/
private void deleteSelectedBucket() {
    S3Bucket currentBucket = getCurrentSelectedBucket();
    if (currentBucket == null) {
        log.warn("Ignoring delete bucket command, no currently selected bucket");
        return;
    }
    int response = JOptionPane.showConfirmDialog(ownerFrame,
        "Are you sure you want to delete '" + currentBucket.getName() + "'?",
        "Delete Bucket?", JOptionPane.YES_NO_OPTION);
    // Only proceed on an explicit Yes. The previous check (response == NO_OPTION)
    // treated closing the dialog (CLOSED_OPTION) as confirmation and would have
    // deleted the bucket without the user ever clicking Yes.
    if (response != JOptionPane.YES_OPTION) {
        return;
    }
    try {
        s3ServiceMulti.getS3Service().deleteBucket(currentBucket.getName());
        bucketTableModel.removeBucket(currentBucket);
    } catch (Exception e) {
        String message = "Unable to delete bucket";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
* Adds a bucket not owned by the current S3 user to the bucket listing, after
* prompting the user for the name of the bucket to add.
* To be added in this way, the third-party bucket must be publicly available.
*
*/
private void addThirdPartyBucket() {
    try {
        String bucketName = (String) JOptionPane.showInputDialog(ownerFrame,
            "Name for third-party bucket:",
            "Add a third-party bucket", JOptionPane.QUESTION_MESSAGE);
        if (bucketName == null) {
            // User cancelled the input dialog.
            return;
        }
        if (!s3ServiceMulti.getS3Service().isBucketAccessible(bucketName)) {
            // The bucket must be publicly accessible to be added this way.
            String message = "Unable to access third-party bucket: " + bucketName;
            log.error(message);
            ErrorDialog.showDialog(ownerFrame, this, message, null);
            return;
        }
        bucketTableModel.addBucket(new S3Bucket(bucketName), false);
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        String message = "Unable to access third-party bucket";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
* Updates the ACL settings for the currently selected bucket.
*/
private void updateBucketAccessControlList() {
    try {
        S3Bucket currentBucket = getCurrentSelectedBucket();
        // Guard against no selection, consistent with deleteSelectedBucket;
        // previously a null bucket fell straight through to the service call.
        if (currentBucket == null) {
            log.warn("Ignoring update bucket ACL command, no currently selected bucket");
            return;
        }
        AccessControlList bucketACL = s3ServiceMulti.getS3Service().getBucketAcl(currentBucket);
        AccessControlList updatedBucketACL = AccessControlDialog.showDialog(
            ownerFrame, new S3Bucket[] {currentBucket}, bucketACL, this);
        // A null result means the user cancelled the ACL dialog.
        if (updatedBucketACL != null) {
            currentBucket.setAcl(updatedBucketACL);
            s3ServiceMulti.getS3Service().putBucketAcl(currentBucket);
        }
    } catch (Exception e) {
        String message = "Unable to update bucket's Access Control List";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
* Updates the ACL settings for the currently selected bucket.
*/
private void updateBucketRequesterPaysSetting() {
    try {
        final S3Bucket selectedBucket = getCurrentSelectedBucket();
        // Look up the current Requester Pays status if it isn't already cached
        // on the bucket object.
        if (!selectedBucket.isRequesterPaysKnown()) {
            selectedBucket.setRequesterPays(
                s3ServiceMulti.getS3Service().isRequesterPaysBucket(
                    selectedBucket.getName()));
        }
        boolean originalRequesterPaysFlag = selectedBucket.isRequesterPays();
        RequesterPaysDialog dialog = new RequesterPaysDialog(selectedBucket, ownerFrame, this);
        dialog.setVisible(true);
        if (!dialog.getOkClicked()) {
            return;
        }
        final boolean newRequesterPaysFlag = dialog.isRequesterPaysSelected();
        dialog.dispose();
        // Only contact the service if the setting actually changed.
        if (newRequesterPaysFlag != originalRequesterPaysFlag) {
            runInBackgroundThread(new Runnable() {
                public void run() {
                    try {
                        s3ServiceMulti.getS3Service().setRequesterPaysBucket(
                            selectedBucket.getName(), newRequesterPaysFlag);
                        selectedBucket.setRequesterPays(newRequesterPaysFlag);
                    } catch (final Exception e) {
                        String message = "Unable to update Requester Pays status";
                        log.error(message, e);
                        ErrorDialog.showDialog(ownerFrame, null, message, e);
                    }
                }
            });
        }
    } catch (Exception e) {
        // Fixed copy-pasted error message: this method updates the Requester
        // Pays setting, not the Access Control List.
        String message = "Unable to update bucket's Requester Pays setting";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
* @return the set of objects currently selected in the gui, or an empty array if none are selected.
*/
private S3Object[] getSelectedObjects() {
    int[] selectedViewRows = objectsTable.getSelectedRows();
    // With no selection this naturally yields an empty array.
    S3Object[] selected = new S3Object[selectedViewRows.length];
    for (int i = 0; i < selectedViewRows.length; i++) {
        // Map each view row through the sorter to the underlying model row.
        int modelRow = objectTableModelSorter.modelIndex(selectedViewRows[i]);
        selected[i] = objectTableModel.getObject(modelRow);
    }
    return selected;
}
/**
 * Retrieves the ACLs of the currently selected objects (in a background
 * thread), shows the {@link AccessControlDialog} so the user can edit a
 * merged view of those ACLs, and applies the updated ACL to every selected
 * object.
 */
private void displayAclModificationDialog() {
    final HyperlinkActivatedListener hyperlinkListener = this;
    runInBackgroundThread(new Runnable() {
        public void run() {
            final S3Object[] selectedObjects = getSelectedObjects();
            boolean aclLookupSucceeded = s3ServiceMulti.getObjectACLs(
                getCurrentSelectedBucket(), selectedObjects);
            if (!aclLookupSucceeded) {
                return;
            }
            // Single-element array so the dispatcher-thread Runnable below can
            // hand the dialog result back to this thread (the code reads
            // updatedObjectACL[0] after the dispatcher call returns).
            final AccessControlList[] updatedObjectACL = new AccessControlList[] {null};
            runInDispatcherThreadImmediately(new Runnable() {
                public void run() {
                    // Build merged ACL containing ALL relevant permissions
                    AccessControlList mergedACL = new AccessControlList();
                    for (int i = 0; i < selectedObjects.length; i++) {
                        AccessControlList objectACL = selectedObjects[i].getAcl();
                        mergedACL.grantAllPermissions(objectACL.getGrants());
                        // BEWARE! Here we assume that all the objects have the same owner...
                        if (mergedACL.getOwner() == null) {
                            mergedACL.setOwner(objectACL.getOwner());
                        }
                    }
                    // Show ACL dialog box for user to change ACL settings for all objects.
                    updatedObjectACL[0] = AccessControlDialog.showDialog(
                        ownerFrame, selectedObjects, mergedACL, hyperlinkListener);
                }
            });
            // A null result means the user cancelled the dialog.
            if (updatedObjectACL[0] != null) {
                // Update ACLs for each object.
                for (int i = 0; i < selectedObjects.length; i++) {
                    selectedObjects[i].setAcl(updatedObjectACL[0]);
                }
                // Perform ACL updates.
                s3ServiceMulti.putACLs(getCurrentSelectedBucket(), selectedObjects);
            }
        }
    });
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>LookupACLEvent</code>.
* <p>
* The ACL details are retrieved for the currently selected objects in the gui, then the
* {@link AccessControlDialog} is displayed to allow the user to update the ACL settings
* for these objects.
*
* @param event
*/
public void s3ServiceEventPerformed(LookupACLEvent event) {
    int eventCode = event.getEventCode();
    if (eventCode == ServiceEvent.EVENT_STARTED) {
        ThreadWatcher watcher = event.getThreadWatcher();
        startProgressDialog(
            "Retrieved 0 of " + watcher.getThreadCount() + " ACLs",
            "", 0, (int) watcher.getThreadCount(), "Cancel Lookup",
            watcher.getCancelEventListener());
    } else if (eventCode == ServiceEvent.EVENT_IN_PROGRESS) {
        ThreadWatcher watcher = event.getThreadWatcher();
        String statusText = "Retrieved " + watcher.getCompletedThreads()
            + " of " + watcher.getThreadCount() + " ACLs";
        updateProgressDialog(statusText, "", (int) watcher.getCompletedThreads());
    } else if (eventCode == ServiceEvent.EVENT_COMPLETED
               || eventCode == ServiceEvent.EVENT_CANCELLED) {
        stopProgressDialog();
    } else if (eventCode == ServiceEvent.EVENT_ERROR) {
        stopProgressDialog();
        String message = "Unable to lookup Access Control list for objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>UpdateACLEvent</code>.
* <p>
* This method merely updates the progress dialog as ACLs are updated.
*
* @param event
*/
public void s3ServiceEventPerformed(UpdateACLEvent event) {
    int eventCode = event.getEventCode();
    if (eventCode == ServiceEvent.EVENT_STARTED) {
        ThreadWatcher watcher = event.getThreadWatcher();
        startProgressDialog(
            "Updated 0 of " + watcher.getThreadCount() + " ACLs",
            "", 0, (int) watcher.getThreadCount(), "Cancel Update",
            watcher.getCancelEventListener());
    } else if (eventCode == ServiceEvent.EVENT_IN_PROGRESS) {
        ThreadWatcher watcher = event.getThreadWatcher();
        String statusText = "Updated " + watcher.getCompletedThreads()
            + " of " + watcher.getThreadCount() + " ACLs";
        updateProgressDialog(statusText, "", (int) watcher.getCompletedThreads());
    } else if (eventCode == ServiceEvent.EVENT_COMPLETED
               || eventCode == ServiceEvent.EVENT_CANCELLED) {
        stopProgressDialog();
    } else if (eventCode == ServiceEvent.EVENT_ERROR) {
        stopProgressDialog();
        String message = "Unable to update Access Control Lists";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
 * Downloads the objects currently selected in the objects table. The user is
 * prompted for a directory in which to store the downloaded files, and the
 * download is then performed in a background thread.
 */
private void downloadSelectedObjects() {
    // Prompt user to choose directory location for downloaded files (or cancel download altogether)
    JFileChooser fileChooser = new JFileChooser();
    fileChooser.setDialogTitle("Choose directory to save S3 files in");
    fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
    fileChooser.setMultiSelectionEnabled(false);
    fileChooser.setSelectedFile(downloadDirectory);
    int returnVal = fileChooser.showDialog(ownerFrame, "Choose Directory");
    if (returnVal != JFileChooser.APPROVE_OPTION) {
        return;
    }
    // Remember the chosen directory for the next download.
    downloadDirectory = fileChooser.getSelectedFile();
    // Find clashing files
    final Map filesAlreadyInDownloadDirectoryMap = new HashMap();
    S3Object[] objectsForDownload = getSelectedObjects();
    for (int i = 0; i < objectsForDownload.length; i++) {
        File file = new File(downloadDirectory,
            objectsForDownload[i].getKey());
        if (file.exists()) {
            filesAlreadyInDownloadDirectoryMap.put(
                objectsForDownload[i].getKey(), file);
        }
    }
    // Build map of S3 Objects being downloaded.
    final Map s3DownloadObjectsMap = FileComparer.getInstance()
        .populateS3ObjectMap("", objectsForDownload);
    final HyperlinkActivatedListener hyperlinkListener = this;
    runInBackgroundThread(new Runnable() {
        public void run() {
            // Retrieve details of objects for download
            if (!retrieveObjectsDetails(getSelectedObjects())) {
                return;
            }
            try {
                // Compare the S3 objects with existing local files, prompting
                // the user to resolve any clashes.
                final FileComparerResults comparisonResults = compareRemoteAndLocalFiles(
                    filesAlreadyInDownloadDirectoryMap,
                    s3DownloadObjectsMap);
                DownloadPackage[] downloadPackages =
                    buildDownloadPackageList(comparisonResults, s3DownloadObjectsMap);
                if (downloadPackages == null) {
                    // The user cancelled, or there is nothing to download.
                    return;
                }
                s3ServiceMulti.downloadObjects(getCurrentSelectedBucket(),
                    downloadPackages);
            } catch (final Exception e) {
                // Error dialogs must be shown on the event dispatch thread.
                runInDispatcherThreadImmediately(new Runnable() {
                    public void run() {
                        String message = "Unable to download objects";
                        log.error(message, e);
                        ErrorDialog.showDialog(ownerFrame,
                            hyperlinkListener, message, e);
                    }
                });
            }
        }
    });
}
/**
 * Uploads the given local files to the currently selected bucket, after
 * prompting the user to resolve any key clashes with objects already in
 * the bucket.
 *
 * @param uploadFiles the local files proposed for upload
 */
private void uploadFiles(File[] uploadFiles) {
    // Fail if encryption is turned on but no password is available.
    if (cockpitPreferences.isUploadEncryptionActive()
        && !cockpitPreferences.isEncryptionPasswordSet())
    {
        ErrorDialog.showDialog(ownerFrame, this,
            "Upload encryption is enabled but you have not yet set a password in the Encryption Preferences.",
            null);
        return;
    }
    try {
        // Build map of files proposed for upload.
        final Map filesForUploadMap = FileComparer.getInstance()
            .buildFileMap(uploadFiles, false);
        // Build map of objects already existing in target S3 bucket with keys
        // matching the proposed upload keys.
        List objectsWithExistingKeys = new ArrayList();
        S3Object[] existingObjects = objectTableModel.getObjects();
        for (int i = 0; i < existingObjects.length; i++) {
            if (filesForUploadMap.keySet().contains(existingObjects[i].getKey()))
            {
                objectsWithExistingKeys.add(existingObjects[i]);
            }
        }
        existingObjects = (S3Object[]) objectsWithExistingKeys
            .toArray(new S3Object[objectsWithExistingKeys.size()]);
        final Map s3ExistingObjectsMap = FileComparer.getInstance()
            .populateS3ObjectMap("", existingObjects);
        final HyperlinkActivatedListener hyperlinkListener = this;
        final S3Object[] clashingObjects = existingObjects;
        runInBackgroundThread(new Runnable() {
            public void run() {
                if (clashingObjects.length > 0) {
                    // Retrieve details of potential clashes
                    if (!retrieveObjectsDetails(clashingObjects)) {
                        return;
                    }
                }
                try {
                    FileComparerResults comparisonResults =
                        compareRemoteAndLocalFiles(filesForUploadMap, s3ExistingObjectsMap);
                    S3Object[] uploadObjects = buildUploadObjectsList(
                        comparisonResults, filesForUploadMap);
                    if (uploadObjects == null) {
                        // Upload was cancelled, or there is nothing to upload.
                        return;
                    }
                    // Upload the files.
                    s3ServiceMulti.putObjects(getCurrentSelectedBucket(), uploadObjects);
                } catch (final Exception e) {
                    // Error dialogs must be shown on the event dispatch thread.
                    runInDispatcherThreadImmediately(new Runnable() {
                        public void run() {
                            String message = "Unable to upload objects";
                            log.error(message, e);
                            ErrorDialog.showDialog(ownerFrame, hyperlinkListener, message, e);
                        }
                    });
                }
            }
        });
    } catch (Exception e) {
        String message = "Unable to upload objects";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
 * Compares the given S3 objects with the given local files, displaying a
 * progress dialog while MD5 hashes are generated for the comparison.
 *
 * @param localFilesMap map of upload/download key to local {@link File}
 * @param s3ObjectsMap map of key to S3 object
 * @return the discrepancy lists produced by
 *         {@link FileComparer#buildDiscrepancyLists}
 * @throws Exception if the comparison fails
 */
private FileComparerResults compareRemoteAndLocalFiles(final Map localFilesMap, final Map s3ObjectsMap)
    throws Exception
{
    try {
        // Compare objects being downloaded and existing local files.
        final String statusText =
            "Comparing " + s3ObjectsMap.size() + " object" + (s3ObjectsMap.size() > 1 ? "s" : "") +
            " in S3 with " + localFilesMap.size() + " local file" + (localFilesMap.size() > 1 ? "s" : "");
        startProgressDialog(statusText, "", 0, 100, null, null);
        // Calculate total files size, to seed the progress watcher below.
        // (The one-element long[] wrapper used previously was unnecessary:
        // the value is only read once, at watcher construction time.)
        File[] files = (File[]) localFilesMap.values().toArray(new File[localFilesMap.size()]);
        long filesSizeTotal = 0;
        for (int i = 0; i < files.length; i++) {
            filesSizeTotal += files[i].length();
        }
        // Monitor generation of MD5 hash, and provide feedback via the progress bar.
        BytesProgressWatcher progressWatcher = new BytesProgressWatcher(filesSizeTotal) {
            public void updateBytesTransferred(long byteCount) {
                super.updateBytesTransferred(byteCount);
                String detailsText = formatBytesProgressWatcherDetails(this, true);
                int progressValue = (int)((double)getBytesTransferred() * 100 / getBytesToTransfer());
                updateProgressDialog(statusText, detailsText, progressValue);
            }
        };
        // The redundant stopProgressDialog() before this return was removed:
        // the finally block below always stops the dialog.
        return FileComparer.getInstance()
            .buildDiscrepancyLists(localFilesMap, s3ObjectsMap, progressWatcher);
    } finally {
        stopProgressDialog();
    }
}
/**
 * Builds the list of download packages by comparing the download candidates
 * against existing local files, prompting the user whether to overwrite any
 * pre-existing file versions. The returned packages are ready to be passed
 * to {@link S3ServiceMulti#downloadObjects}, where the actual download is
 * performed.
 */
private DownloadPackage[] buildDownloadPackageList(FileComparerResults comparisonResults,
    Map s3DownloadObjectsMap) throws Exception
{
    // Determine which files to download, prompting user whether to over-write existing files
    List objectKeysForDownload = new ArrayList();
    objectKeysForDownload.addAll(comparisonResults.onlyOnServerKeys);
    int newFiles = comparisonResults.onlyOnServerKeys.size();
    int unchangedFiles = comparisonResults.alreadySynchronisedKeys.size();
    int changedFiles = comparisonResults.updatedOnClientKeys.size()
        + comparisonResults.updatedOnServerKeys.size();
    if (unchangedFiles > 0 || changedFiles > 0) {
        // Ask user whether to replace existing unchanged and/or existing changed files.
        log.debug("Files for download clash with existing local files, prompting user to choose which files to replace");
        List options = new ArrayList();
        String message = "Of the " + (newFiles + unchangedFiles + changedFiles)
            + " objects being downloaded:\n\n";
        if (newFiles > 0) {
            message += newFiles + " files are new.\n\n";
            options.add(DOWNLOAD_NEW_FILES_ONLY);
        }
        if (changedFiles > 0) {
            message += changedFiles + " files have changed.\n\n";
            options.add(DOWNLOAD_NEW_AND_CHANGED_FILES);
        }
        if (unchangedFiles > 0) {
            message += unchangedFiles + " files already exist and are unchanged.\n\n";
            options.add(DOWNLOAD_ALL_FILES);
        }
        message += "Please choose which files you wish to download:";
        Object response = JOptionPane.showInputDialog(
            ownerFrame, message, "Replace files?", JOptionPane.QUESTION_MESSAGE,
            null, options.toArray(), DOWNLOAD_NEW_AND_CHANGED_FILES);
        if (response == null) {
            // User dismissed the dialog; treat as a cancelled download.
            return null;
        }
        if (DOWNLOAD_NEW_FILES_ONLY.equals(response)) {
            // No change required to default objectKeysForDownload list.
        } else if (DOWNLOAD_ALL_FILES.equals(response)) {
            objectKeysForDownload.addAll(comparisonResults.updatedOnClientKeys);
            objectKeysForDownload.addAll(comparisonResults.updatedOnServerKeys);
            objectKeysForDownload.addAll(comparisonResults.alreadySynchronisedKeys);
        } else if (DOWNLOAD_NEW_AND_CHANGED_FILES.equals(response)) {
            objectKeysForDownload.addAll(comparisonResults.updatedOnClientKeys);
            objectKeysForDownload.addAll(comparisonResults.updatedOnServerKeys);
        } else {
            // Download cancelled.
            return null;
        }
    }
    log.debug("Downloading " + objectKeysForDownload.size() + " objects");
    if (objectKeysForDownload.size() == 0) {
        return null;
    }
    // Create array of objects for download.
    S3Object[] objects = new S3Object[objectKeysForDownload.size()];
    int objectIndex = 0;
    for (Iterator iter = objectKeysForDownload.iterator(); iter.hasNext();) {
        objects[objectIndex++] = (S3Object) s3DownloadObjectsMap.get(iter.next());
    }
    Map downloadObjectsToFileMap = new HashMap();
    ArrayList downloadPackageList = new ArrayList();
    // Setup files to write to, creating parent directories when necessary.
    for (int i = 0; i < objects.length; i++) {
        File file = new File(downloadDirectory, objects[i].getKey());
        // Encryption password must be null if no password is set.
        String encryptionPassword = null;
        if (cockpitPreferences.isEncryptionPasswordSet()) {
            encryptionPassword = cockpitPreferences.getEncryptionPassword();
        }
        // Create local directories corresponding to objects flagged as dirs.
        if (Mimetypes.MIMETYPE_JETS3T_DIRECTORY.equals(objects[i].getContentType())) {
            file.mkdirs();
        }
        DownloadPackage downloadPackage = ObjectUtils
            .createPackageForDownload(objects[i], file, true, true, encryptionPassword);
        // createPackageForDownload may return null; such objects are skipped.
        if (downloadPackage != null) {
            downloadObjectsToFileMap.put(objects[i].getKey(), file);
            downloadPackageList.add(downloadPackage);
        }
    }
    return (DownloadPackage[]) downloadPackageList
        .toArray(new DownloadPackage[downloadPackageList.size()]);
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>DownloadObjectsEvent</code>.
* <p>
* This method merely updates the progress dialog as objects are downloaded.
*
* @param event
*/
public void s3ServiceEventPerformed(DownloadObjectsEvent event) {
    if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        ThreadWatcher watcher = event.getThreadWatcher();
        // Show percentage of bytes transferred, if this info is available.
        if (watcher.isBytesTransferredInfoAvailable()) {
            startProgressDialog("Downloaded " +
                watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                byteFormatter.formatByteSize(watcher.getBytesTransferred())
                + " of " + byteFormatter.formatByteSize(watcher.getBytesTotal()),
                "", 0, 100, "Cancel Download",
                watcher.getCancelEventListener());
        // ... otherwise just show the number of completed threads.
        } else {
            startProgressDialog("Downloaded " + watcher.getCompletedThreads()
                + " of " + watcher.getThreadCount() + " objects",
                "", 0, (int) watcher.getThreadCount(), "Cancel Download",
                watcher.getCancelEventListener());
        }
    }
    else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
        ThreadWatcher watcher = event.getThreadWatcher();
        // Show percentage of bytes transferred, if this info is available.
        if (watcher.isBytesTransferredInfoAvailable()) {
            String bytesCompletedStr = byteFormatter.formatByteSize(watcher.getBytesTransferred());
            String bytesTotalStr = byteFormatter.formatByteSize(watcher.getBytesTotal());
            String statusText = "Downloaded " +
                watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                bytesCompletedStr + " of " + bytesTotalStr;
            String detailsText = formatTransferDetails(watcher);
            int percentage = (int)
                (((double)watcher.getBytesTransferred() / watcher.getBytesTotal()) * 100);
            updateProgressDialog(statusText, detailsText, percentage);
        }
        // ... otherwise just show the number of completed threads.
        else {
            ThreadWatcher progressStatus = event.getThreadWatcher();
            String statusText = "Downloaded " + progressStatus.getCompletedThreads()
                + " of " + progressStatus.getThreadCount() + " objects";
            updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
        }
    } else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
        // Download finished; simply dismiss the progress dialog.
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
        stopProgressDialog();
        String message = "Unable to download objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
 * Builds the list of S3 objects to upload by comparing the upload candidates
 * against objects already in the bucket, prompting the user whether to
 * replace unchanged and/or changed files. The returned objects are populated
 * with metadata, ACL preferences and (optionally) encrypted/compressed data.
 *
 * @param comparisonResults discrepancy lists between local files and S3 objects
 * @param uploadingFilesMap map of upload key to local {@link File}
 * @return the objects to upload, or null if the upload was cancelled or
 *         there is nothing to upload
 * @throws Exception if preparing an object for upload fails
 */
private S3Object[] buildUploadObjectsList(FileComparerResults comparisonResults,
    Map uploadingFilesMap) throws Exception
{
    // Determine which files to upload, prompting user whether to over-write existing files
    List fileKeysForUpload = new ArrayList();
    fileKeysForUpload.addAll(comparisonResults.onlyOnClientKeys);
    int newFiles = comparisonResults.onlyOnClientKeys.size();
    int unchangedFiles = comparisonResults.alreadySynchronisedKeys.size();
    int changedFiles = comparisonResults.updatedOnClientKeys.size()
        + comparisonResults.updatedOnServerKeys.size();
    if (unchangedFiles > 0 || changedFiles > 0) {
        // Ask user whether to replace existing unchanged and/or existing changed files.
        log.debug("Files for upload clash with existing S3 objects, prompting user to choose which files to replace");
        List options = new ArrayList();
        String message = "Of the " + uploadingFilesMap.size()
            + " files being uploaded:\n\n";
        if (newFiles > 0) {
            message += newFiles + " files are new.\n\n";
            options.add(UPLOAD_NEW_FILES_ONLY);
        }
        if (changedFiles > 0) {
            message += changedFiles + " files have changed.\n\n";
            options.add(UPLOAD_NEW_AND_CHANGED_FILES);
        }
        if (unchangedFiles > 0) {
            message += unchangedFiles + " files already exist and are unchanged.\n\n";
            options.add(UPLOAD_ALL_FILES);
        }
        message += "Please choose which files you wish to upload:";
        Object response = JOptionPane.showInputDialog(
            ownerFrame, message, "Replace files?", JOptionPane.QUESTION_MESSAGE,
            null, options.toArray(), UPLOAD_NEW_AND_CHANGED_FILES);
        if (response == null) {
            return null;
        }
        if (UPLOAD_NEW_FILES_ONLY.equals(response)) {
            // No change required to default fileKeysForUpload list.
        } else if (UPLOAD_ALL_FILES.equals(response)) {
            fileKeysForUpload.addAll(comparisonResults.updatedOnClientKeys);
            fileKeysForUpload.addAll(comparisonResults.updatedOnServerKeys);
            fileKeysForUpload.addAll(comparisonResults.alreadySynchronisedKeys);
        } else if (UPLOAD_NEW_AND_CHANGED_FILES.equals(response)) {
            fileKeysForUpload.addAll(comparisonResults.updatedOnClientKeys);
            fileKeysForUpload.addAll(comparisonResults.updatedOnServerKeys);
        } else {
            // Upload cancelled. A spurious stopProgressDialog() call was
            // removed here: no progress dialog has been started at this point
            // in the method (it only starts below, at the "Prepared..." stage),
            // and the download counterpart simply returns null.
            return null;
        }
    }
    if (fileKeysForUpload.size() == 0) {
        return null;
    }
    // statusText is updated by the preparation loop below and re-displayed
    // whenever the progress watcher fires; a single-element array lets the
    // anonymous BytesProgressWatcher read the latest value.
    final String[] statusText = new String[1];
    statusText[0] = "Prepared 0 of " + fileKeysForUpload.size() + " files for upload";
    startProgressDialog(statusText[0], "", 0, 100, null, null);
    // Estimate the total bytes to process. Encrypted/compressed uploads are
    // weighted at 3x the raw file size — presumably because the data is read,
    // transformed and written again; TODO confirm the rationale.
    long bytesToProcess = 0;
    for (Iterator iter = fileKeysForUpload.iterator(); iter.hasNext();) {
        File file = (File) uploadingFilesMap.get(iter.next().toString());
        bytesToProcess += file.length() *
            (cockpitPreferences.isUploadEncryptionActive() || cockpitPreferences.isUploadCompressionActive() ? 3 : 1);
    }
    BytesProgressWatcher progressWatcher = new BytesProgressWatcher(bytesToProcess) {
        public void updateBytesTransferred(long byteCount) {
            super.updateBytesTransferred(byteCount);
            String detailsText = formatBytesProgressWatcherDetails(this, false);
            int progressValue = (int)((double)getBytesTransferred() * 100 / getBytesToTransfer());
            updateProgressDialog(statusText[0], detailsText, progressValue);
        }
    };
    // Populate S3Objects representing upload files with metadata etc.
    final S3Object[] objects = new S3Object[fileKeysForUpload.size()];
    int objectIndex = 0;
    for (Iterator iter = fileKeysForUpload.iterator(); iter.hasNext();) {
        String fileKey = iter.next().toString();
        File file = (File) uploadingFilesMap.get(fileKey);
        S3Object newObject = ObjectUtils
            .createObjectForUpload(fileKey, file,
                (cockpitPreferences.isUploadEncryptionActive() ? encryptionUtil : null),
                cockpitPreferences.isUploadCompressionActive(), progressWatcher);
        // Apply the user's preferred canned ACL to the new object.
        String aclPreferenceString = cockpitPreferences.getUploadACLPermission();
        if (CockpitPreferences.UPLOAD_ACL_PERMISSION_PRIVATE.equals(aclPreferenceString)) {
            // Objects are private by default, nothing more to do.
        } else if (CockpitPreferences.UPLOAD_ACL_PERMISSION_PUBLIC_READ.equals(aclPreferenceString)) {
            newObject.setAcl(AccessControlList.REST_CANNED_PUBLIC_READ);
        } else if (CockpitPreferences.UPLOAD_ACL_PERMISSION_PUBLIC_READ_WRITE.equals(aclPreferenceString)) {
            newObject.setAcl(AccessControlList.REST_CANNED_PUBLIC_READ_WRITE);
        } else {
            log.warn("Ignoring unrecognised upload ACL permission setting: " + aclPreferenceString);
        }
        statusText[0] = "Prepared " + (objectIndex + 1)
            + " of " + fileKeysForUpload.size() + " files for upload";
        objects[objectIndex++] = newObject;
    }
    stopProgressDialog();
    return objects;
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>CreateObjectsEvent</code>.
* <p>
* This method merely updates the progress dialog as files are uploaded.
*
* @param event
*/
public void s3ServiceEventPerformed(final CreateObjectsEvent event) {
    if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        // Upload has started: open the progress dialog.
        ThreadWatcher watcher = event.getThreadWatcher();
        // Show percentage of bytes transferred, if this info is available.
        if (watcher.isBytesTransferredInfoAvailable()) {
            String bytesTotalStr = byteFormatter.formatByteSize(watcher.getBytesTotal());
            String statusText = "Uploaded " +
                watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                "0 of " + bytesTotalStr;
            // Byte-based progress: dialog range is a 0-100 percentage.
            startProgressDialog(statusText, " ", 0, 100, "Cancel Upload",
                event.getThreadWatcher().getCancelEventListener());
        }
        // ... otherwise show the number of completed threads.
        else {
            // Thread-based progress: dialog range is the thread count.
            startProgressDialog(
                "Uploaded 0 of " + watcher.getThreadCount() + " objects",
                "", (int) watcher.getCompletedThreads(), (int) watcher.getThreadCount(),
                "Cancel upload", event.getThreadWatcher().getCancelEventListener());
        }
    }
    else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
        // Add each newly created object to the objects table; table updates
        // must happen on the event dispatch thread.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                for (int i = 0; i < event.getCreatedObjects().length; i++) {
                    S3Object object = event.getCreatedObjects()[i];
                    object.setBucketName(getCurrentSelectedBucket().getName());
                    objectTableModel.addObject(object);
                }
                if (event.getCreatedObjects().length > 0) {
                    updateObjectsSummary(true);
                }
            }
        });
        ThreadWatcher watcher = event.getThreadWatcher();
        // Show percentage of bytes transferred, if this info is available.
        if (watcher.isBytesTransferredInfoAvailable()) {
            if (watcher.getBytesTransferred() >= watcher.getBytesTotal()) {
                // Upload is completed, just waiting on response from S3.
                String statusText = "Upload completed, awaiting confirmation";
                updateProgressDialog(statusText, "", 100);
            } else {
                String bytesCompletedStr = byteFormatter.formatByteSize(watcher.getBytesTransferred());
                String bytesTotalStr = byteFormatter.formatByteSize(watcher.getBytesTotal());
                String statusText = "Uploaded " +
                    watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                    bytesCompletedStr + " of " + bytesTotalStr;
                // Cast via double to avoid integer-division truncation.
                int percentage = (int)
                    (((double)watcher.getBytesTransferred() / watcher.getBytesTotal()) * 100);
                String detailsText = formatTransferDetails(watcher);
                updateProgressDialog(statusText, detailsText, percentage);
            }
        }
        // ... otherwise show the number of completed threads.
        else {
            ThreadWatcher progressStatus = event.getThreadWatcher();
            String statusText = "Uploaded " + progressStatus.getCompletedThreads() +
                " of " + progressStatus.getThreadCount() + " objects";
            updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
        }
    }
    else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
        stopProgressDialog();
        // Cache the now-complete object listing for the current bucket (EDT).
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                updateObjectsSummary(false);
                S3Object[] allObjects = objectTableModel.getObjects();
                cachedBuckets.put(getCurrentSelectedBucket().getName(), allObjects);
            }
        });
    }
    else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
        // Cancelled: refresh the summary, then close the progress dialog.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                updateObjectsSummary(false);
            }
        });
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
        // Report the failure to the user and log the underlying cause.
        stopProgressDialog();
        String message = "Unable to upload objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
 * Prompts the user with a {@link CopyObjectsDialog} to copy or move the
 * currently selected objects, then performs the copy (and optional delete,
 * for a move) in a background thread via <code>s3ServiceMulti</code>.
 * Errors are logged and reported via an {@link ErrorDialog}.
 */
private void copyObjects() {
    try {
        final S3Object[] sourceObjects = getSelectedObjects();
        CopyObjectsDialog dialog = new CopyObjectsDialog(ownerFrame,
            "Copy or Move Objects", skinsFactory,
            sourceObjects, bucketTableModel.getBuckets());
        dialog.setVisible(true);
        if (dialog.isCopyActionApproved()) {
            // Capture all dialog settings before disposing of it.
            final String currentBucketName = getCurrentSelectedBucket().getName();
            final String destinationBucketName = dialog.getDestinationBucketName();
            final String[] sourceObjectKeys = dialog.getSourceObjectKeys();
            final S3Object[] destinationObjects = dialog.getDestinationObjects();
            final boolean isDeleteAfterCopy = dialog.isMoveOptionSelected();
            final boolean retainAcls = dialog.isCopyOriginalAccessControlLists();
            dialog.dispose();
            if (!destinationBucketName.equals(currentBucketName)) {
                // Invalidate the destination bucket's cached listing, as its
                // contents are about to change.
                cachedBuckets.remove(destinationBucketName);
            }
            runInBackgroundThread(new Runnable() {
                public void run() {
                    if (retainAcls) {
                        // Retain ACL settings from original objects.
                        if (!s3ServiceMulti.getObjectACLs(
                            getCurrentSelectedBucket(), sourceObjects))
                        {
                            // ACL lookup failed/cancelled; abort the copy.
                            return;
                        }
                        for (int i = 0; i < sourceObjects.length; i++) {
                            destinationObjects[i].setAcl(
                                sourceObjects[i].getAcl());
                        }
                    }
                    // Copy objects. Metadata is retained, not replaced.
                    s3ServiceMulti.copyObjects(currentBucketName, destinationBucketName,
                        sourceObjectKeys, destinationObjects, false);
                    if (isDeleteAfterCopy) {
                        // NOTE: this local deliberately shadows the outer
                        // sourceObjects; it builds lightweight key-only objects
                        // for the delete request.
                        final S3Object[] sourceObjects = new S3Object[sourceObjectKeys.length];
                        for (int i = 0; i < sourceObjectKeys.length; i++) {
                            sourceObjects[i] = new S3Object(sourceObjectKeys[i]);
                        }
                        s3ServiceMulti.deleteObjects(getCurrentSelectedBucket(), sourceObjects);
                    }
                    if (destinationBucketName.equals(currentBucketName) || isDeleteAfterCopy) {
                        // Refresh object listing for current bucket if the bucket's contents
                        // have changed.
                        listObjects();
                    }
                }
            });
        } else {
            dialog.dispose();
        }
    } catch (RuntimeException e) {
        // Let programming errors propagate unchanged.
        throw e;
    } catch (Exception e) {
        stopProgressDialog();
        String message = "Unable to modify objects";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>CopyObjectsEvent</code>.
* <p>
* This method merely updates the progress dialog as objects are copied.
*
* @param event
*/
public void s3ServiceEventPerformed(final CopyObjectsEvent event) {
    final int eventCode = event.getEventCode();
    if (ServiceEvent.EVENT_STARTED == eventCode) {
        // Open the progress dialog sized by the number of copy threads.
        ThreadWatcher watcher = event.getThreadWatcher();
        startProgressDialog("Copied 0 of " + watcher.getThreadCount() + " objects",
            "", 0, (int) watcher.getThreadCount(),
            "Cancel Copy", event.getThreadWatcher().getCancelEventListener());
    } else if (ServiceEvent.EVENT_IN_PROGRESS == eventCode) {
        // Update the dialog with the count of completed copy threads.
        ThreadWatcher watcher = event.getThreadWatcher();
        int completed = (int) watcher.getCompletedThreads();
        updateProgressDialog(
            "Copied " + watcher.getCompletedThreads()
                + " of " + watcher.getThreadCount() + " objects",
            "", completed);
    } else if (ServiceEvent.EVENT_COMPLETED == eventCode
               || ServiceEvent.EVENT_CANCELLED == eventCode) {
        // Nothing more to do beyond closing the progress dialog.
        stopProgressDialog();
    } else if (ServiceEvent.EVENT_ERROR == eventCode) {
        stopProgressDialog();
        String message = "Unable to copy objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
private void generatePublicGetUrls() {
    final S3Object[] selectedObjects = getSelectedObjects();
    // Nothing to do when no objects are selected.
    if (selectedObjects.length < 1) {
        log.warn("Ignoring Generate Public URLs object command because no objects are selected");
        return;
    }
    // Delegate URL generation and display to the signed-GET dialog.
    SignedGetUrlDialog urlDialog = new SignedGetUrlDialog(
        ownerFrame, this, s3ServiceMulti.getS3Service(), selectedObjects);
    urlDialog.setVisible(true);
}
private void generateTorrentUrl() {
    final S3Object[] selectedObjects = getSelectedObjects();
    // A torrent URL can only be produced for exactly one object.
    if (selectedObjects.length != 1) {
        log.warn("Ignoring Generate Public URL object command, can only operate on a single object");
        return;
    }
    S3Object targetObject = selectedObjects[0];
    // Generate URL
    String torrentUrl = S3Service.createTorrentUrl(
        getCurrentSelectedBucket().getName(), targetObject.getKey());
    // Display signed URL
    JOptionPane.showInputDialog(ownerFrame,
        "Torrent URL for '" + targetObject.getKey() + "'.",
        "Torrent URL", JOptionPane.INFORMATION_MESSAGE, null, null, torrentUrl);
}
/**
 * Deletes the currently selected objects after the user explicitly confirms
 * the operation, then refreshes the objects summary and the cached listing
 * for the current bucket. The delete itself runs in a background thread.
 */
private void deleteSelectedObjects() {
    final S3Object[] objects = getSelectedObjects();
    if (objects.length == 0) {
        log.warn("Ignoring delete objects command, no currently selected objects");
        return;
    }
    int response = JOptionPane.showConfirmDialog(ownerFrame,
        (objects.length == 1 ?
            "Are you sure you want to delete '" + objects[0].getKey() + "'?" :
            "Are you sure you want to delete " + objects.length + " objects?"
        ),
        "Delete Objects?", JOptionPane.YES_NO_OPTION);
    // BUGFIX: only proceed on an explicit "Yes". The original test for
    // NO_OPTION meant that dismissing the dialog (CLOSED_OPTION, -1) was
    // treated as confirmation and deleted the objects.
    if (response != JOptionPane.YES_OPTION) {
        return;
    }
    runInBackgroundThread(new Runnable() {
        public void run() {
            // Perform the delete, then update the summary and bucket cache
            // on the event dispatch thread.
            s3ServiceMulti.deleteObjects(getCurrentSelectedBucket(), objects);
            runInDispatcherThreadImmediately(new Runnable() {
                public void run() {
                    updateObjectsSummary(false);
                    S3Object[] allObjects = objectTableModel.getObjects();
                    cachedBuckets.put(getCurrentSelectedBucket().getName(), allObjects);
                }
            });
        }
    });
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>DeleteObjectsEvent</code>.
* <p>
* This method merely updates the progress dialog as objects are deleted.
*
* @param event
*/
public void s3ServiceEventPerformed(final DeleteObjectsEvent event) {
    if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        // Open a progress dialog sized by the number of delete threads.
        startProgressDialog(
            "Deleted 0 of " + event.getThreadWatcher().getThreadCount() + " objects",
            "", 0, (int) event.getThreadWatcher().getThreadCount(), "Cancel Delete Objects",
            event.getThreadWatcher().getCancelEventListener());
    }
    else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
        // Remove deleted objects from the table on the event dispatch thread.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                for (int i = 0; i < event.getDeletedObjects().length; i++) {
                    objectTableModel.removeObject(
                        event.getDeletedObjects()[i]);
                }
                if (event.getDeletedObjects().length > 0) {
                    updateObjectsSummary(true);
                }
            }
        });
        ThreadWatcher progressStatus = event.getThreadWatcher();
        String statusText = "Deleted " + progressStatus.getCompletedThreads()
            + " of " + progressStatus.getThreadCount() + " objects";
        updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
    }
    // CONSISTENCY: completed and cancelled require identical clean-up, so the
    // two previously-duplicated branches are merged — matching the style of
    // the CopyObjectsEvent handler in this class.
    else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()
        || ServiceEvent.EVENT_CANCELLED == event.getEventCode())
    {
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
        // Report the failure to the user and log the underlying cause.
        stopProgressDialog();
        String message = "Unable to delete objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
* Retrieves details about objects including metadata etc by invoking the method
* {@link S3ServiceMulti#getObjectsHeads}.
*
* This is generally done as a prelude to some further action, such as
* displaying the objects' details or downloading the objects.
* The real action occurs in the method <code>s3ServiceEventPerformed</code> for handling
* <code>GetObjectHeadsEvent</code> events.
*
* @param candidateObjects
*
* @return
* true if objects details were successfully retrieved.
*/
private boolean retrieveObjectsDetails(final S3Object[] candidateObjects) {
    // Collect only those candidates whose metadata has not yet been fetched,
    // so HEAD requests are issued for the minimum number of objects.
    ArrayList incompleteList = new ArrayList();
    for (int i = 0; i < candidateObjects.length; i++) {
        S3Object candidate = candidateObjects[i];
        if (!candidate.isMetadataComplete()) {
            incompleteList.add(candidate);
        }
    }
    log.debug("Of " + candidateObjects.length + " object candidates for HEAD requests "
        + incompleteList.size() + " are incomplete, performing requests for these only");
    final S3Object[] incompleteObjects =
        (S3Object[]) incompleteList.toArray(new S3Object[incompleteList.size()]);
    return s3ServiceMulti.getObjectsHeads(getCurrentSelectedBucket(), incompleteObjects);
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>GetObjectHeadsEvent</code>.
* <p>
* This method merely updates the progress dialog as object details (heads) are retrieved.
*
* @param event
*/
public void s3ServiceEventPerformed(final GetObjectHeadsEvent event) {
    if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        // Only show a progress dialog if there is actually work to do.
        if (event.getThreadWatcher().getThreadCount() > 0) {
            startProgressDialog("Retrieved details for 0 of "
                + event.getThreadWatcher().getThreadCount() + " objects",
                "", 0, (int) event.getThreadWatcher().getThreadCount(), "Cancel Retrieval",
                event.getThreadWatcher().getCancelEventListener());
        }
    }
    else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
        final ThreadWatcher progressStatus = event.getThreadWatcher();
        // Store detail-complete objects in table.
        runInDispatcherThreadImmediately(new Runnable() {
            public void run() {
                // Update object in table with the retrieved details.
                for (int i = 0; i < event.getCompletedObjects().length; i++) {
                    S3Object objectWithDetails = event.getCompletedObjects()[i];
                    // Copy the fetched metadata onto the table's existing
                    // object for the same key, and mark it complete so it is
                    // not re-fetched.
                    S3Object originalObject = objectTableModel.getObjectByKey(
                        objectWithDetails.getKey());
                    originalObject.replaceAllMetadata(objectWithDetails.getMetadataMap());
                    originalObject.setMetadataComplete(true);
                    log.debug("Updated table with " + originalObject.getKey()
                        + ", content-type=" + originalObject.getContentType());
                }
            }
        });
        // Update progress of GetObject requests.
        String statusText = "Retrieved details for " + progressStatus.getCompletedThreads()
            + " of " + progressStatus.getThreadCount() + " objects";
        updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
    }
    else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
        // Stop GetObjectHead progress display.
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
        // Report the failure to the user and log the underlying cause.
        stopProgressDialog();
        String message = "Unable to retrieve objects details";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
private String formatTransferDetails(ThreadWatcher watcher) {
    // Always report the current transfer rate; append the estimated time
    // remaining only when the watcher can provide it.
    String details = byteFormatter.formatByteSize(watcher.getBytesPerSecond()) + "/s";
    if (watcher.isTimeRemainingAvailable()) {
        details += " - Time remaining: "
            + timeFormatter.formatTime(watcher.getTimeRemaining());
    }
    return details;
}
private String formatBytesProgressWatcherDetails(BytesProgressWatcher watcher, boolean includeBytes) {
    // Optional "<transferred> of <total> - " prefix, then the time remaining.
    String bytesPrefix = "";
    if (includeBytes) {
        bytesPrefix = byteFormatter.formatByteSize(watcher.getBytesTransferred())
            + " of " + byteFormatter.formatByteSize(watcher.getBytesToTransfer())
            + " - ";
    }
    return bytesPrefix + "Time remaining: "
        + timeFormatter.formatTime(watcher.getRemainingTime());
}
/**
* Follows hyperlinks clicked on by a user. This is achieved differently depending on whether
* Cockpit is running as an applet or as a stand-alone application:
* <ul>
* <li>Application: Detects the default browser application for the user's system (using
* <tt>BareBonesBrowserLaunch</tt>) and opens the link as a new window in that browser</li>
* <li>Applet: Opens the link in the current browser using the applet's context</li>
* </ul>
*
* @param url
* the url to open
* @param target
* the target pane to open the url in, eg "_blank". This may be null.
*/
public void followHyperlink(URL url, String target) {
    if (isStandAloneApplication) {
        // Stand-alone application: hand the link to the system's default browser.
        BareBonesBrowserLaunch.openURL(url.toString());
    } else if (target == null) {
        // Applet with no target pane specified.
        getAppletContext().showDocument(url);
    } else {
        // Applet with an explicit target pane (e.g. "_blank").
        getAppletContext().showDocument(url, target);
    }
}
/**
* Implementation method for the CredentialsProvider interface.
* <p>
* Based on sample code:
* <a href="http://svn.apache.org/viewvc/jakarta/commons/proper/httpclient/trunk/src/examples/InteractiveAuthenticationExample.java?view=markup">InteractiveAuthenticationExample</a>
*
*/
public Credentials getCredentials(AuthScheme authscheme, String host, int port, boolean proxy) throws CredentialsNotAvailableException {
    if (authscheme == null) {
        return null;
    }
    try {
        // Remains null if the user dismisses the dialog without entering a name.
        Credentials credentials = null;
        if (authscheme instanceof NTLMScheme) {
            // Prompt for Windows (NTLM) domain credentials.
            AuthenticationDialog pwDialog = new AuthenticationDialog(
                ownerFrame, "Authentication Required",
                "<html>Host <b>" + host + ":" + port + "</b> requires Windows authentication</html>", true);
            pwDialog.setVisible(true);
            if (pwDialog.getUser().length() > 0) {
                credentials = new NTCredentials(pwDialog.getUser(), pwDialog.getPassword(),
                    host, pwDialog.getDomain());
            }
            pwDialog.dispose();
        } else
        if (authscheme instanceof RFC2617Scheme) {
            // Prompt for Basic/Digest (RFC 2617) credentials for the realm.
            AuthenticationDialog pwDialog = new AuthenticationDialog(
                ownerFrame, "Authentication Required",
                "<html><center>Host <b>" + host + ":" + port + "</b>"
                + " requires authentication for the realm:<br><b>" + authscheme.getRealm() + "</b></center></html>", false);
            pwDialog.setVisible(true);
            if (pwDialog.getUser().length() > 0) {
                credentials = new UsernamePasswordCredentials(pwDialog.getUser(), pwDialog.getPassword());
            }
            pwDialog.dispose();
        } else {
            throw new CredentialsNotAvailableException("Unsupported authentication scheme: " +
                authscheme.getSchemeName());
        }
        return credentials;
    } catch (IOException e) {
        throw new CredentialsNotAvailableException(e.getMessage(), e);
    }
}
/**
 * @return
 * true if the user has enabled object filtering and has provided a non-empty
 * prefix or delimiter to filter on, false otherwise.
 */
private boolean isObjectFilteringActive() {
    if (!filterObjectsCheckBox.isSelected()) {
        return false;
    }
    // IDIOM: return the boolean expression directly instead of the original
    // "if (cond) return true; else return false;" construct.
    String delimiter = (String) filterObjectsDelimiter.getSelectedItem();
    return filterObjectsPrefix.getText().length() > 0
        || delimiter.length() > 0;
}
/**
 * Mouse listener that opens the bucket or object context (popup) menu on a
 * platform-appropriate popup trigger, after first selecting the row or list
 * item under the click.
 */
private class ContextMenuListener extends MouseAdapter {
    // Popup triggers fire on press on some platforms and on release on
    // others, so both events are checked.
    public void mousePressed(MouseEvent e) {
        showContextMenu(e);
    }
    public void mouseReleased(MouseEvent e) {
        showContextMenu(e);
    }
    private void showContextMenu(MouseEvent e) {
        if (e.isPopupTrigger()) {
            // Select item under context-click.
            if (e.getSource() instanceof JList) {
                JList jList = (JList) e.getSource();
                int locIndex = jList.locationToIndex(e.getPoint());
                if (locIndex >= 0) {
                    jList.setSelectedIndex(locIndex);
                }
            } else if (e.getSource() instanceof JTable) {
                JTable jTable = (JTable) e.getSource();
                int rowIndex = jTable.rowAtPoint(e.getPoint());
                if (rowIndex >= 0) {
                    // Adds to (rather than replaces) the current selection.
                    jTable.addRowSelectionInterval(rowIndex, rowIndex);
                }
            }
            // Show context popup menu.
            if (e.getSource().equals(bucketsTable)) {
                showBucketPopupMenu((JComponent)e.getSource(), e.getX(), e.getY());
            } else if (e.getSource().equals(objectsTable)) {
                showObjectPopupMenu((JComponent)e.getSource(), e.getX(), e.getY());
            }
        }
    }
}
/**
* Runs Cockpit as a stand-alone application.
* @param args
* @throws Exception
*/
/**
 * Runs Cockpit as a stand-alone application.
 *
 * @param args command-line arguments (unused)
 * @throws Exception
 */
public static void main(String args[]) throws Exception {
    JFrame ownerFrame = new JFrame("JetS3t Cockpit");
    // IDIOM: extend WindowAdapter instead of implementing all seven empty
    // WindowListener methods; only window-closing behaviour is needed.
    // Fully qualified to avoid touching the file's import block.
    ownerFrame.addWindowListener(new java.awt.event.WindowAdapter() {
        public void windowClosing(WindowEvent e) {
            e.getWindow().dispose();
        }
    });
    new Cockpit(ownerFrame);
}
}
| src/org/jets3t/apps/cockpit/Cockpit.java | /*
* jets3t : Java Extra-Tasty S3 Toolkit (for Amazon S3 online storage service)
* This is a java.net project, see https://jets3t.dev.java.net/
*
* Copyright 2008 James Murty
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jets3t.apps.cockpit;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Rectangle;
import java.awt.datatransfer.DataFlavor;
import java.awt.dnd.DnDConstants;
import java.awt.dnd.DropTarget;
import java.awt.dnd.DropTargetDragEvent;
import java.awt.dnd.DropTargetDropEvent;
import java.awt.dnd.DropTargetEvent;
import java.awt.dnd.DropTargetListener;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.swing.JApplet;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JComponent;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JSeparator;
import javax.swing.JSplitPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import javax.swing.SwingUtilities;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.TableColumn;
import org.apache.commons.httpclient.Credentials;
import org.apache.commons.httpclient.NTCredentials;
import org.apache.commons.httpclient.UsernamePasswordCredentials;
import org.apache.commons.httpclient.auth.AuthScheme;
import org.apache.commons.httpclient.auth.CredentialsNotAvailableException;
import org.apache.commons.httpclient.auth.CredentialsProvider;
import org.apache.commons.httpclient.auth.NTLMScheme;
import org.apache.commons.httpclient.auth.RFC2617Scheme;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jets3t.apps.cockpit.gui.AccessControlDialog;
import org.jets3t.apps.cockpit.gui.BucketLoggingDialog;
import org.jets3t.apps.cockpit.gui.BucketTableModel;
import org.jets3t.apps.cockpit.gui.CreateBucketDialog;
import org.jets3t.apps.cockpit.gui.ObjectTableModel;
import org.jets3t.apps.cockpit.gui.PreferencesDialog;
import org.jets3t.apps.cockpit.gui.RequesterPaysDialog;
import org.jets3t.apps.cockpit.gui.SignedGetUrlDialog;
import org.jets3t.apps.cockpit.gui.StartupDialog;
import org.jets3t.gui.AuthenticationDialog;
import org.jets3t.gui.CopyObjectsDialog;
import org.jets3t.gui.ErrorDialog;
import org.jets3t.gui.GuiUtils;
import org.jets3t.gui.HyperlinkActivatedListener;
import org.jets3t.gui.ItemPropertiesDialog;
import org.jets3t.gui.JHtmlLabel;
import org.jets3t.gui.ManageDistributionsDialog;
import org.jets3t.gui.ObjectsAttributesDialog;
import org.jets3t.gui.ProgressDialog;
import org.jets3t.gui.TableSorter;
import org.jets3t.gui.skins.SkinsFactory;
import org.jets3t.service.CloudFrontService;
import org.jets3t.service.CloudFrontServiceException;
import org.jets3t.service.Constants;
import org.jets3t.service.Jets3tProperties;
import org.jets3t.service.S3ObjectsChunk;
import org.jets3t.service.S3Service;
import org.jets3t.service.S3ServiceException;
import org.jets3t.service.acl.AccessControlList;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.io.BytesProgressWatcher;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.model.S3Object;
import org.jets3t.service.model.cloudfront.Distribution;
import org.jets3t.service.multithread.CancelEventTrigger;
import org.jets3t.service.multithread.CopyObjectsEvent;
import org.jets3t.service.multithread.CreateBucketsEvent;
import org.jets3t.service.multithread.CreateObjectsEvent;
import org.jets3t.service.multithread.DeleteObjectsEvent;
import org.jets3t.service.multithread.DownloadObjectsEvent;
import org.jets3t.service.multithread.DownloadPackage;
import org.jets3t.service.multithread.GetObjectHeadsEvent;
import org.jets3t.service.multithread.GetObjectsEvent;
import org.jets3t.service.multithread.ListObjectsEvent;
import org.jets3t.service.multithread.LookupACLEvent;
import org.jets3t.service.multithread.S3ServiceEventListener;
import org.jets3t.service.multithread.S3ServiceMulti;
import org.jets3t.service.multithread.ServiceEvent;
import org.jets3t.service.multithread.ThreadWatcher;
import org.jets3t.service.multithread.UpdateACLEvent;
import org.jets3t.service.security.AWSCredentials;
import org.jets3t.service.security.EncryptionUtil;
import org.jets3t.service.utils.ByteFormatter;
import org.jets3t.service.utils.FileComparer;
import org.jets3t.service.utils.FileComparerResults;
import org.jets3t.service.utils.Mimetypes;
import org.jets3t.service.utils.ObjectUtils;
import org.jets3t.service.utils.TimeFormatter;
import com.centerkey.utils.BareBonesBrowserLaunch;
/**
* Cockpit is a graphical Java application for viewing and managing the contents of an Amazon S3 account.
* For more information and help please see the
* <a href="http://jets3t.s3.amazonaws.com/applications/cockpit.html">Cockpit Guide</a>.
* <p>
* This is the Cockpit application class; it may be run as a stand-alone application or as an Applet.
*
* @author jmurty
*/
public class Cockpit extends JApplet implements S3ServiceEventListener, ActionListener,
ListSelectionListener, HyperlinkActivatedListener, CredentialsProvider
{
private static final long serialVersionUID = 1275456909864052884L;
private static final Log log = LogFactory.getLog(Cockpit.class);
// External help/info pages opened from the Help menu.
public static final String JETS3T_COCKPIT_HELP_PAGE = "http://jets3t.s3.amazonaws.com/applications/cockpit.html";
public static final String AMAZON_S3_PAGE = "http://www.amazon.com/s3";
public static final String APPLICATION_DESCRIPTION = "Cockpit/0.7.0";
public static final String APPLICATION_TITLE = "JetS3t Cockpit";
// Chunk size used when listing bucket contents — assumed to bound each
// listing request; TODO confirm against the listing code.
private static final int BUCKET_LIST_CHUNKING_SIZE = 1000;
// Directory holding Cockpit's configuration files (mime.types,
// jets3t.properties, user preferences).
private File cockpitHomeDirectory = Constants.DEFAULT_PREFERENCES_DIRECTORY;
private CockpitPreferences cockpitPreferences = null;
// Shared layout insets for GridBag-based panels.
private final Insets insetsZero = new Insets(0, 0, 0, 0);
private final Insets insetsDefault = new Insets(5, 7, 5, 7);
// Formatters reused across progress/status displays.
private final ByteFormatter byteFormatter = new ByteFormatter();
private final TimeFormatter timeFormatter = new TimeFormatter();
private final SimpleDateFormat yearAndTimeSDF = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private final SimpleDateFormat timeSDF = new SimpleDateFormat("HH:mm:ss");
private final GuiUtils guiUtils = new GuiUtils();
/**
 * Multi-threaded S3 service used by the application.
 */
private S3ServiceMulti s3ServiceMulti = null;
private CloudFrontService cloudFrontService = null;
// Frame that owns all modal dialogs; found or created in init().
private Frame ownerFrame = null;
private boolean isStandAloneApplication = false;
// Service main menu items
private JMenuItem loginMenuItem = null;
private JMenuItem logoutMenuItem = null;
// Bucket main menu items
private JPopupMenu bucketActionMenu = null;
private JMenuItem viewBucketPropertiesMenuItem = null;
private JMenuItem refreshBucketMenuItem = null;
private JMenuItem createBucketMenuItem = null;
private JMenuItem manageDistributionsMenuItem = null;
private JMenuItem updateBucketACLMenuItem = null;
private JMenuItem updateBucketRequesterPaysStatusMenuItem = null;
private JMenuItem deleteBucketMenuItem = null;
// Object main menu items
private JPopupMenu objectActionMenu = null;
private JMenuItem refreshObjectMenuItem = null;
private JMenuItem viewOrModifyObjectAttributesMenuItem = null;
private JMenuItem copyObjectsMenuItem = null;
private JMenuItem updateObjectACLMenuItem = null;
private JMenuItem downloadObjectMenuItem = null;
private JMenuItem uploadFilesMenuItem = null;
private JMenuItem generatePublicGetUrls = null;
private JMenuItem generateTorrentUrl = null;
private JMenuItem deleteObjectMenuItem = null;
// Tools menu items.
private JMenuItem bucketLoggingMenuItem = null;
// Preference menu items.
private JMenuItem preferencesDialogMenuItem = null;
// Help menu items.
private JMenuItem cockpitHelpMenuItem = null;
private JMenuItem amazonS3HelpMenuItem = null;
// Tables
private JTable bucketsTable = null;
private JTable objectsTable = null;
private JScrollPane objectsTableSP = null;
private BucketTableModel bucketTableModel = null;
private TableSorter bucketTableModelSorter = null;
private ObjectTableModel objectTableModel = null;
private TableSorter objectTableModelSorter = null;
private JLabel objectsSummaryLabel = null;
// Caches each bucket's object listing, keyed by bucket name.
private HashMap cachedBuckets = new HashMap();
private ProgressDialog progressDialog = null;
private ObjectsAttributesDialog objectsAttributesDialog = null;
private File downloadDirectory = null;
private File fileChoosersLastUploadDirectory = null;
// Object-filtering controls (prefix/delimiter filter panel).
private JPanel filterObjectsPanel = null;
private JCheckBox filterObjectsCheckBox = null;
private JTextField filterObjectsPrefix = null;
private JComboBox filterObjectsDelimiter = null;
// File comparison options
private static final String UPLOAD_NEW_FILES_ONLY = "Only upload new files";
private static final String UPLOAD_NEW_AND_CHANGED_FILES = "Upload new and changed files";
private static final String UPLOAD_ALL_FILES = "Upload all files";
private static final String DOWNLOAD_NEW_FILES_ONLY = "Only download new files";
private static final String DOWNLOAD_NEW_AND_CHANGED_FILES = "Download new and changed files";
private static final String DOWNLOAD_ALL_FILES = "Download all files";
private EncryptionUtil encryptionUtil = null;
private Jets3tProperties cockpitProperties = null;
private SkinsFactory skinsFactory = null;
/**
* Constructor to run this application as an Applet.
*/
public Cockpit() {
    // No-op: when run as an applet, initialisation happens in init().
}
/**
* Constructor to run this application in a stand-alone window.
*
* @param ownerFrame the frame the application will be displayed in
* @throws S3ServiceException
*/
public Cockpit(JFrame ownerFrame) throws S3ServiceException {
    this.ownerFrame = ownerFrame;
    isStandAloneApplication = true;
    init();
    // Embed this applet component inside the owner frame and show it.
    ownerFrame.getContentPane().add(this);
    ownerFrame.setBounds(this.getBounds());
    ownerFrame.setVisible(true);
}
/**
* Prepares application to run as a GUI by finding/creating a root owner JFrame, creating an
* un-authenticated {@link RestS3Service} and loading properties files.
*/
public void init() {
    super.init();
    // Find or create a Frame to own modal dialog boxes.
    if (this.ownerFrame == null) {
        // Walk up the component hierarchy looking for an enclosing Frame.
        Component c = this;
        while (!(c instanceof Frame) && c.getParent() != null) {
            c = c.getParent();
        }
        if (!(c instanceof Frame)) {
            this.ownerFrame = new JFrame();
        } else {
            this.ownerFrame = (Frame) c;
        }
    }
    // Initialise the GUI.
    initGui();
    // Initialise a non-authenticated service.
    try {
        // Revert to anonymous service.
        s3ServiceMulti = new S3ServiceMulti(
            new RestS3Service(null, APPLICATION_DESCRIPTION, this), this);
        cloudFrontService = null;
    } catch (S3ServiceException e) {
        String message = "Unable to start anonymous service";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
    // Load Cockpit configuration files from cockpit's home directory.
    // Optional mime.types override.
    File mimeTypesFile = new File(cockpitHomeDirectory, "mime.types");
    if (mimeTypesFile.exists()) {
        try {
            Mimetypes.getInstance().loadAndReplaceMimetypes(
                new FileInputStream(mimeTypesFile));
        } catch (IOException e) {
            String message = "Unable to load mime.types file: " + mimeTypesFile;
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    }
    // Optional jets3t.properties override.
    File jets3tPropertiesFile = new File(cockpitHomeDirectory, "jets3t.properties");
    if (jets3tPropertiesFile.exists()) {
        try {
            Jets3tProperties.getInstance(Constants.JETS3T_PROPERTIES_FILENAME)
                .loadAndReplaceProperties(new FileInputStream(jets3tPropertiesFile),
                    "jets3t.properties in Cockpit's home folder " + cockpitHomeDirectory);
        } catch (IOException e) {
            String message = "Unable to load jets3t.properties file: " + jets3tPropertiesFile;
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    }
    // Initialise the user's preferences.
    this.cockpitPreferences = new CockpitPreferences();
    File cockpitPreferencesPropertiesFile = new File(cockpitHomeDirectory, Constants.COCKPIT_PROPERTIES_FILENAME);
    if (cockpitPreferencesPropertiesFile.exists()) {
        try {
            Properties properties = new Properties();
            properties.load(new FileInputStream(cockpitPreferencesPropertiesFile));
            this.cockpitPreferences.fromProperties(properties);
        } catch (IOException e) {
            String message = "Unable to load your preferences";
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    }
    cockpitProperties = Jets3tProperties.getInstance(Constants.JETS3T_PROPERTIES_FILENAME);
    skinsFactory = SkinsFactory.getInstance(cockpitProperties.getProperties());
    // Prompt the user to log in once start-up completes, on the EDT.
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            loginEvent();
        }
    });
}
/**
 * Initialises the application's GUI elements: the menu bar, the buckets table
 * (left), the objects table with its optional filter panel (right), and the
 * split pane combining them. Also wires up selection listeners, context menus
 * and the file drag-and-drop target.
 *
 * Note: statement order matters here — table models and sorters must exist
 * before the tables that display them, and the drop target is initialised
 * last (and left inactive until a bucket is selected).
 */
private void initGui() {
    // Menus must be built before the rest of the frame content.
    initMenus();

    JPanel appContent = new JPanel(new GridBagLayout());
    this.getContentPane().add(appContent);

    // Buckets panel: header label, bucket-actions popup trigger, and table.
    JPanel bucketsPanel = new JPanel(new GridBagLayout());

    JButton bucketActionButton = new JButton();
    bucketActionButton.setToolTipText("Bucket actions menu");
    guiUtils.applyIcon(bucketActionButton, "/images/nuvola/16x16/actions/misc.png");
    bucketActionButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            // Show the bucket popup menu anchored just below the button.
            JButton sourceButton = (JButton) e.getSource();
            bucketActionMenu.show(sourceButton, 0, sourceButton.getHeight());
        }
    });
    bucketsPanel.add(new JHtmlLabel("<html><b>Buckets</b></html>", this),
        new GridBagConstraints(0, 0, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));
    bucketsPanel.add(bucketActionButton,
        new GridBagConstraints(1, 0, 1, 1, 0, 0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));

    // Buckets table: single selection, sortable via TableSorter wrapper.
    bucketTableModel = new BucketTableModel();
    bucketTableModelSorter = new TableSorter(bucketTableModel);
    bucketsTable = new JTable(bucketTableModelSorter);
    bucketTableModelSorter.setTableHeader(bucketsTable.getTableHeader());
    bucketsTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
    bucketsTable.getSelectionModel().addListSelectionListener(this);
    bucketsTable.setShowHorizontalLines(true);
    bucketsTable.setShowVerticalLines(false);

    // Fix a narrow width for the CloudFront-distribution indicator column (column 1).
    TableColumn distributionFlagColumn = bucketsTable.getColumnModel().getColumn(1);
    int distributionFlagColumnWidth = 18;
    distributionFlagColumn.setPreferredWidth(distributionFlagColumnWidth);
    distributionFlagColumn.setMaxWidth(distributionFlagColumnWidth);
    distributionFlagColumn.setMinWidth(0);

    bucketsTable.addMouseListener(new ContextMenuListener());
    bucketsPanel.add(new JScrollPane(bucketsTable),
        new GridBagConstraints(0, 1, 2, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, insetsZero, 0, 0));
    bucketsPanel.add(new JLabel(" "),
        new GridBagConstraints(0, 2, 2, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsDefault, 0, 0));

    // Filter panel: prefix text field and delimiter combo. Both fire the
    // "RefreshObjects" action so edits immediately re-list objects.
    // Hidden until the "Filter objects" checkbox is selected.
    filterObjectsPanel = new JPanel(new GridBagLayout());
    filterObjectsPrefix = new JTextField();
    filterObjectsPrefix.setToolTipText("Only show objects with this prefix");
    filterObjectsPrefix.addActionListener(this);
    filterObjectsPrefix.setActionCommand("RefreshObjects");
    filterObjectsDelimiter = new JComboBox(new String[] {"", "/", "?", "\\"});
    filterObjectsDelimiter.setEditable(true);
    filterObjectsDelimiter.setToolTipText("Object name delimiter");
    filterObjectsDelimiter.addActionListener(this);
    filterObjectsDelimiter.setActionCommand("RefreshObjects");
    filterObjectsPanel.add(new JHtmlLabel("Prefix:", this),
        new GridBagConstraints(0, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsZero, 0, 0));
    filterObjectsPanel.add(filterObjectsPrefix,
        new GridBagConstraints(1, 0, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsDefault, 0, 0));
    filterObjectsPanel.add(new JHtmlLabel("Delimiter:", this),
        new GridBagConstraints(2, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsDefault, 0, 0));
    filterObjectsPanel.add(filterObjectsDelimiter,
        new GridBagConstraints(3, 0, 1, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.NONE, insetsZero, 0, 0));
    filterObjectsPanel.setVisible(false);

    // Objects panel: header row, filter panel, objects table, summary label.
    // 'row' tracks the current GridBag row; pre-incremented for each new row.
    JPanel objectsPanel = new JPanel(new GridBagLayout());
    int row = 0;
    filterObjectsCheckBox = new JCheckBox("Filter objects");
    filterObjectsCheckBox.addActionListener(this);
    filterObjectsCheckBox.setToolTipText("Check this option to filter the objects listed");
    objectsPanel.add(new JHtmlLabel("<html><b>Objects</b></html>", this),
        new GridBagConstraints(0, row, 1, 1, 1, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));
    objectsPanel.add(filterObjectsCheckBox,
        new GridBagConstraints(1, row, 1, 1, 0, 0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));

    JButton objectActionButton = new JButton();
    objectActionButton.setToolTipText("Object actions menu");
    guiUtils.applyIcon(objectActionButton, "/images/nuvola/16x16/actions/misc.png");
    objectActionButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            // Show the object popup menu anchored just below the button.
            JButton sourceButton = (JButton) e.getSource();
            objectActionMenu.show(sourceButton, 0, sourceButton.getHeight());
        }
    });
    objectsPanel.add(objectActionButton,
        new GridBagConstraints(2, row, 1, 1, 0, 0, GridBagConstraints.EAST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));
    objectsPanel.add(filterObjectsPanel,
        new GridBagConstraints(0, ++row, 3, 1, 0, 0, GridBagConstraints.WEST, GridBagConstraints.HORIZONTAL, insetsZero, 0, 0));

    // Objects table: multi-selection, sortable, with custom renderers for
    // byte sizes (Long) and last-modified dates (Date).
    objectsTable = new JTable();
    objectTableModel = new ObjectTableModel();
    objectTableModelSorter = new TableSorter(objectTableModel);
    objectTableModelSorter.setTableHeader(objectsTable.getTableHeader());
    objectsTable.setModel(objectTableModelSorter);
    objectsTable.setDefaultRenderer(Long.class, new DefaultTableCellRenderer() {
        private static final long serialVersionUID = 301092191828910402L;

        // Render object sizes as human-readable byte counts.
        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
            String formattedSize = byteFormatter.formatByteSize(((Long)value).longValue());
            return super.getTableCellRendererComponent(table, formattedSize, isSelected, hasFocus, row, column);
        }
    });
    objectsTable.setDefaultRenderer(Date.class, new DefaultTableCellRenderer() {
        private static final long serialVersionUID = 7285511556343895652L;

        // Render dates with the shared year-and-time date format.
        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
            Date date = (Date) value;
            return super.getTableCellRendererComponent(table, yearAndTimeSDF.format(date), isSelected, hasFocus, row, column);
        }
    });
    objectsTable.setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION);
    objectsTable.getSelectionModel().addListSelectionListener(this);
    objectsTable.setShowHorizontalLines(true);
    objectsTable.setShowVerticalLines(true);
    objectsTable.addMouseListener(new ContextMenuListener());
    objectsTableSP = new JScrollPane(objectsTable);
    objectsPanel.add(objectsTableSP,
        new GridBagConstraints(0, ++row, 3, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, insetsZero, 0, 0));
    objectsSummaryLabel = new JHtmlLabel("Please select a bucket", this);
    objectsSummaryLabel.setHorizontalAlignment(JLabel.CENTER);
    objectsSummaryLabel.setFocusable(false);
    objectsPanel.add(objectsSummaryLabel,
        new GridBagConstraints(0, ++row, 3, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, insetsDefault, 0, 0));

    // Combine the buckets and objects panels in a horizontal split pane.
    JSplitPane splitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT,
        bucketsPanel, objectsPanel);
    splitPane.setOneTouchExpandable(true);
    splitPane.setContinuousLayout(true);
    appContent.add(splitPane,
        new GridBagConstraints(0, 0, 1, 1, 1, 1, GridBagConstraints.CENTER, GridBagConstraints.BOTH, insetsDefault, 0, 0));

    // Set preferred sizes; buckets panel gets ~30% of the split width.
    int preferredWidth = 800;
    int preferredHeight = 600;
    this.setBounds(new Rectangle(new Dimension(preferredWidth, preferredHeight)));
    splitPane.setResizeWeight(0.30);

    // Initialize drag-and-drop targets on the objects table and its scroll
    // pane; inactive until enabled elsewhere (e.g. when a bucket is selected).
    initDropTarget(new JComponent[] {objectsTableSP, objectsTable} );
    objectsTable.getDropTarget().setActive(false);
    objectsTableSP.getDropTarget().setActive(false);
}
/**
 * Initialise the application's menu bar (Service, Tools, Help) and the
 * popup menus for bucket and object actions.
 *
 * Most menu items dispatch through {@link #actionPerformed(ActionEvent)}
 * via their action-command strings; login-state-dependent items start out
 * disabled and are toggled by the login/logout event handlers.
 */
private void initMenus() {
    JMenuBar appMenuBar = new JMenuBar();
    this.setJMenuBar(appMenuBar);

    // Service menu: log in, log out, and (stand-alone only) quit.
    JMenu serviceMenu = new JMenu("Service");

    loginMenuItem = new JMenuItem("Log in...");
    loginMenuItem.setActionCommand("LoginEvent");
    loginMenuItem.addActionListener(this);
    guiUtils.applyIcon(loginMenuItem, "/images/nuvola/16x16/actions/connect_creating.png");
    serviceMenu.add(loginMenuItem);

    logoutMenuItem = new JMenuItem("Log out");
    logoutMenuItem.setActionCommand("LogoutEvent");
    logoutMenuItem.addActionListener(this);
    guiUtils.applyIcon(logoutMenuItem, "/images/nuvola/16x16/actions/connect_no.png");
    serviceMenu.add(logoutMenuItem);

    // A Quit item only makes sense when running as a stand-alone app
    // (not, e.g., as an applet).
    if (isStandAloneApplication) {
        serviceMenu.add(new JSeparator());

        JMenuItem quitMenuItem = new JMenuItem("Quit");
        quitMenuItem.setActionCommand("QuitEvent");
        quitMenuItem.addActionListener(this);
        guiUtils.applyIcon(quitMenuItem, "/images/nuvola/16x16/actions/exit.png");
        serviceMenu.add(quitMenuItem);
    }

    loginMenuItem.setEnabled(true);
    logoutMenuItem.setEnabled(false);

    // Bucket action popup menu.
    bucketActionMenu = new JPopupMenu();

    refreshBucketMenuItem = new JMenuItem("Refresh bucket listing");
    refreshBucketMenuItem.setActionCommand("RefreshBuckets");
    refreshBucketMenuItem.addActionListener(this);
    guiUtils.applyIcon(refreshBucketMenuItem, "/images/nuvola/16x16/actions/reload.png");
    bucketActionMenu.add(refreshBucketMenuItem);

    viewBucketPropertiesMenuItem = new JMenuItem("View bucket properties...");
    viewBucketPropertiesMenuItem.setActionCommand("ViewBucketProperties");
    viewBucketPropertiesMenuItem.addActionListener(this);
    guiUtils.applyIcon(viewBucketPropertiesMenuItem, "/images/nuvola/16x16/actions/viewmag.png");
    bucketActionMenu.add(viewBucketPropertiesMenuItem);

    updateBucketACLMenuItem = new JMenuItem("Update bucket's Access Control List...");
    updateBucketACLMenuItem.setActionCommand("UpdateBucketACL");
    updateBucketACLMenuItem.addActionListener(this);
    guiUtils.applyIcon(updateBucketACLMenuItem, "/images/nuvola/16x16/actions/encrypted.png");
    bucketActionMenu.add(updateBucketACLMenuItem);

    updateBucketRequesterPaysStatusMenuItem = new JMenuItem("Update bucket's Requester Pays status...");
    updateBucketRequesterPaysStatusMenuItem.setActionCommand("UpdateBucketRequesterPaysStatus");
    updateBucketRequesterPaysStatusMenuItem.addActionListener(this);
    guiUtils.applyIcon(updateBucketRequesterPaysStatusMenuItem, "/images/nuvola/16x16/actions/identity.png");
    bucketActionMenu.add(updateBucketRequesterPaysStatusMenuItem);

    bucketActionMenu.add(new JSeparator());

    createBucketMenuItem = new JMenuItem("Create new bucket...");
    createBucketMenuItem.setActionCommand("CreateBucket");
    createBucketMenuItem.addActionListener(this);
    guiUtils.applyIcon(createBucketMenuItem, "/images/nuvola/16x16/actions/viewmag+.png");
    bucketActionMenu.add(createBucketMenuItem);

    JMenuItem thirdPartyBucketMenuItem = new JMenuItem("Add third-party bucket...");
    thirdPartyBucketMenuItem.setActionCommand("AddThirdPartyBucket");
    thirdPartyBucketMenuItem.addActionListener(this);
    guiUtils.applyIcon(thirdPartyBucketMenuItem, "/images/nuvola/16x16/actions/viewmagfit.png");
    bucketActionMenu.add(thirdPartyBucketMenuItem);

    bucketActionMenu.add(new JSeparator());

    deleteBucketMenuItem = new JMenuItem("Delete bucket...");
    deleteBucketMenuItem.setActionCommand("DeleteBucket");
    deleteBucketMenuItem.addActionListener(this);
    guiUtils.applyIcon(deleteBucketMenuItem, "/images/nuvola/16x16/actions/cancel.png");
    bucketActionMenu.add(deleteBucketMenuItem);

    // Bucket actions are unavailable until the user logs in.
    viewBucketPropertiesMenuItem.setEnabled(false);
    refreshBucketMenuItem.setEnabled(false);
    createBucketMenuItem.setEnabled(false);
    updateBucketACLMenuItem.setEnabled(false);
    updateBucketRequesterPaysStatusMenuItem.setEnabled(false);
    deleteBucketMenuItem.setEnabled(false);

    // Object action popup menu.
    objectActionMenu = new JPopupMenu();

    refreshObjectMenuItem = new JMenuItem("Refresh object listing");
    refreshObjectMenuItem.setActionCommand("RefreshObjects");
    refreshObjectMenuItem.addActionListener(this);
    guiUtils.applyIcon(refreshObjectMenuItem, "/images/nuvola/16x16/actions/reload.png");
    objectActionMenu.add(refreshObjectMenuItem);

    viewOrModifyObjectAttributesMenuItem = new JMenuItem("View or Modify object attributes...");
    viewOrModifyObjectAttributesMenuItem.setActionCommand("ViewOrModifyObjectAttributes");
    viewOrModifyObjectAttributesMenuItem.addActionListener(this);
    guiUtils.applyIcon(viewOrModifyObjectAttributesMenuItem, "/images/nuvola/16x16/actions/viewmag.png");
    objectActionMenu.add(viewOrModifyObjectAttributesMenuItem);

    copyObjectsMenuItem = new JMenuItem("Copy or Move objects...");
    copyObjectsMenuItem.setActionCommand("CopyObjects");
    copyObjectsMenuItem.addActionListener(this);
    guiUtils.applyIcon(copyObjectsMenuItem, "/images/nuvola/16x16/actions/filenew.png");
    objectActionMenu.add(copyObjectsMenuItem);

    updateObjectACLMenuItem = new JMenuItem("View or Modify Access Control Lists...");
    updateObjectACLMenuItem.setActionCommand("UpdateObjectACL");
    updateObjectACLMenuItem.addActionListener(this);
    guiUtils.applyIcon(updateObjectACLMenuItem, "/images/nuvola/16x16/actions/encrypted.png");
    objectActionMenu.add(updateObjectACLMenuItem);

    downloadObjectMenuItem = new JMenuItem("Download objects...");
    downloadObjectMenuItem.setActionCommand("DownloadObjects");
    downloadObjectMenuItem.addActionListener(this);
    guiUtils.applyIcon(downloadObjectMenuItem, "/images/nuvola/16x16/actions/1downarrow.png");
    objectActionMenu.add(downloadObjectMenuItem);

    uploadFilesMenuItem = new JMenuItem("Upload files...");
    uploadFilesMenuItem.setActionCommand("UploadFiles");
    uploadFilesMenuItem.addActionListener(this);
    guiUtils.applyIcon(uploadFilesMenuItem, "/images/nuvola/16x16/actions/1uparrow.png");
    objectActionMenu.add(uploadFilesMenuItem);

    objectActionMenu.add(new JSeparator());

    generatePublicGetUrls = new JMenuItem("Generate Public GET URLs...");
    generatePublicGetUrls.setActionCommand("GeneratePublicGetURLs");
    generatePublicGetUrls.addActionListener(this);
    guiUtils.applyIcon(generatePublicGetUrls, "/images/nuvola/16x16/actions/wizard.png");
    objectActionMenu.add(generatePublicGetUrls);

    generateTorrentUrl = new JMenuItem("Generate Torrent URL...");
    generateTorrentUrl.setActionCommand("GenerateTorrentURL");
    generateTorrentUrl.addActionListener(this);
    guiUtils.applyIcon(generateTorrentUrl, "/images/nuvola/16x16/actions/wizard.png");
    objectActionMenu.add(generateTorrentUrl);

    objectActionMenu.add(new JSeparator());

    deleteObjectMenuItem = new JMenuItem("Delete objects...");
    deleteObjectMenuItem.setActionCommand("DeleteObjects");
    deleteObjectMenuItem.addActionListener(this);
    guiUtils.applyIcon(deleteObjectMenuItem, "/images/nuvola/16x16/actions/cancel.png");
    objectActionMenu.add(deleteObjectMenuItem);

    // Object actions are unavailable until objects are selected.
    viewOrModifyObjectAttributesMenuItem.setEnabled(false);
    copyObjectsMenuItem.setEnabled(false);
    refreshObjectMenuItem.setEnabled(false);
    updateObjectACLMenuItem.setEnabled(false);
    downloadObjectMenuItem.setEnabled(false);
    uploadFilesMenuItem.setEnabled(false);
    generatePublicGetUrls.setEnabled(false);
    generateTorrentUrl.setEnabled(false);
    deleteObjectMenuItem.setEnabled(false);

    // Tools menu.
    JMenu toolsMenu = new JMenu("Tools");

    bucketLoggingMenuItem = new JMenuItem("Configure Bucket logging...");
    bucketLoggingMenuItem.setActionCommand("BucketLogging");
    bucketLoggingMenuItem.addActionListener(this);
    bucketLoggingMenuItem.setEnabled(false);
    guiUtils.applyIcon(bucketLoggingMenuItem, "/images/nuvola/16x16/actions/toggle_log.png");
    toolsMenu.add(bucketLoggingMenuItem);

    manageDistributionsMenuItem = new JMenuItem("Manage CloudFront Distributions...");
    manageDistributionsMenuItem.setActionCommand("ManageDistributions");
    manageDistributionsMenuItem.addActionListener(this);
    guiUtils.applyIcon(manageDistributionsMenuItem, "/images/nuvola/16x16/actions/irkick.png");
    manageDistributionsMenuItem.setEnabled(false);
    toolsMenu.add(manageDistributionsMenuItem);

    toolsMenu.add(new JSeparator());

    preferencesDialogMenuItem = new JMenuItem("Preferences...");
    preferencesDialogMenuItem.setActionCommand("PreferencesDialog");
    preferencesDialogMenuItem.addActionListener(this);
    guiUtils.applyIcon(preferencesDialogMenuItem, "/images/nuvola/16x16/actions/configure.png");
    toolsMenu.add(preferencesDialogMenuItem);

    // Help menu: items open documentation pages in the browser directly,
    // bypassing the actionPerformed dispatcher.
    JMenu helpMenu = new JMenu("Help");

    cockpitHelpMenuItem = new JMenuItem("Cockpit Guide");
    guiUtils.applyIcon(cockpitHelpMenuItem, "/images/nuvola/16x16/actions/help.png");
    cockpitHelpMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            try {
                followHyperlink(new URL(JETS3T_COCKPIT_HELP_PAGE), "_blank");
            } catch (MalformedURLException ex) {
                // A malformed constant URL is a programming error; preserve
                // the original exception as the cause for diagnostics.
                throw new IllegalStateException("Invalid URL embedded in program: "
                    + JETS3T_COCKPIT_HELP_PAGE, ex);
            }
        }
    });
    helpMenu.add(cockpitHelpMenuItem);

    amazonS3HelpMenuItem = new JMenuItem("Amazon S3");
    guiUtils.applyIcon(amazonS3HelpMenuItem, "/images/nuvola/16x16/actions/gohome.png");
    amazonS3HelpMenuItem.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            try {
                followHyperlink(new URL(AMAZON_S3_PAGE), "_blank");
            } catch (MalformedURLException ex) {
                // Preserve the cause rather than discarding it.
                throw new IllegalStateException("Invalid URL embedded in program: "
                    + AMAZON_S3_PAGE, ex);
            }
        }
    });
    helpMenu.add(amazonS3HelpMenuItem);

    // Build the application menu bar.
    appMenuBar.add(serviceMenu);
    appMenuBar.add(toolsMenu);
    appMenuBar.add(helpMenu);
}
/**
 * Initialise the application's File drop targets for drag and drop copying of local files
 * to S3.
 *
 * Only {@link DataFlavor#javaFileListFlavor} drags with a COPY or MOVE
 * action are accepted; everything else is rejected. Accepted drops hand
 * the dropped files off to {@link #uploadFiles}.
 *
 * @param dropTargetComponents
 * the components files can be dropped on to transfer them to S3
 */
private void initDropTarget(JComponent[] dropTargetComponents) {
    DropTargetListener dropTargetListener = new DropTargetListener() {

        /**
         * Accepts the drag if it carries a file list with a COPY/MOVE
         * action, otherwise rejects it.
         *
         * @return true if the drag was accepted.
         */
        private boolean checkValidDrag(DropTargetDragEvent dtde) {
            if (dtde.isDataFlavorSupported(DataFlavor.javaFileListFlavor)
                && (DnDConstants.ACTION_COPY == dtde.getDropAction()
                    || DnDConstants.ACTION_MOVE == dtde.getDropAction()))
            {
                dtde.acceptDrag(dtde.getDropAction());
                return true;
            } else {
                dtde.rejectDrag();
                return false;
            }
        }

        // Requests focus for the given component on the event dispatch
        // thread; shared by the drag callbacks below.
        private void requestFocusLater(final Component component) {
            SwingUtilities.invokeLater(new Runnable() {
                public void run() {
                    component.requestFocusInWindow();
                }
            });
        }

        public void dragEnter(DropTargetDragEvent dtde) {
            if (checkValidDrag(dtde)) {
                requestFocusLater(objectsTable);
            }
        }

        public void dragOver(DropTargetDragEvent dtde) {
            checkValidDrag(dtde);
        }

        public void dropActionChanged(DropTargetDragEvent dtde) {
            // Focus the objects table while a valid drag hovers, otherwise
            // return focus to the main frame.
            if (checkValidDrag(dtde)) {
                requestFocusLater(objectsTable);
            } else {
                requestFocusLater(ownerFrame);
            }
        }

        public void dragExit(DropTargetEvent dte) {
            requestFocusLater(ownerFrame);
        }

        public void drop(DropTargetDropEvent dtde) {
            if (dtde.isDataFlavorSupported(DataFlavor.javaFileListFlavor)
                && (DnDConstants.ACTION_COPY == dtde.getDropAction()
                    || DnDConstants.ACTION_MOVE == dtde.getDropAction()))
            {
                dtde.acceptDrop(dtde.getDropAction());
                try {
                    final List fileList = (List) dtde.getTransferable().getTransferData(
                        DataFlavor.javaFileListFlavor);
                    if (fileList != null && fileList.size() > 0) {
                        uploadFiles((File[]) fileList.toArray(new File[fileList.size()]));
                    }
                } catch (Exception e) {
                    String message = "Unable to accept dropped items";
                    log.error(message, e);
                    ErrorDialog.showDialog(ownerFrame, null, message, e);
                }
            } else {
                dtde.rejectDrop();
            }
        }
    };

    // Attach the shared drop target listener to each target component.
    for (int i = 0; i < dropTargetComponents.length; i++) {
        new DropTarget(dropTargetComponents[i], DnDConstants.ACTION_COPY, dropTargetListener, true);
    }
}
/**
 * Starts the given task on a fresh background thread and returns as soon as
 * the thread has been started — it does not wait for the task to finish.
 *
 * @param runnable the task to execute in the background
 */
private synchronized void runInBackgroundThread(Runnable runnable) {
    new Thread(runnable).start();
}
/**
 * Runs the given task on the Swing event dispatch thread and blocks until it
 * has completed.
 *
 * @param runnable the task to run on the event dispatch thread
 * @return true if the task completed normally, false if it failed (the
 *         failure is logged but not rethrown)
 */
private synchronized boolean runInDispatcherThreadImmediately(Runnable runnable) {
    boolean completed = false;
    try {
        SwingUtilities.invokeAndWait(runnable);
        completed = true;
    } catch (Exception e) {
        log.error("Error displaying graphical elements", e);
    }
    return completed;
}
/**
 * Starts a progress display dialog that cannot be cancelled. While the dialog is running the user
 * cannot interact with the application.
 *
 * Convenience overload: delegates to the full version with no details text,
 * a 0/0 progress range (indeterminate), and no cancel listener/button.
 *
 * @param statusText
 * describes the status of a task in text meaningful to the user
 */
private void startProgressDialog(String statusText) {
    startProgressDialog(statusText, null, 0, 0, null, null);
}
/**
 * Starts a progress display dialog. While the dialog is running the user cannot interact
 * with the application, except to cancel the task.
 *
 * The dialog is created lazily on first use and is shown asynchronously on
 * the event dispatch thread.
 *
 * @param statusMessage
 * describes the status of a task text meaningful to the user, such as "3 files of 7 uploaded"
 * @param detailsText
 * describes the status of a task in more detail, such as the current transfer rate and Time remaining.
 * @param minTaskValue the minimum progress value for a task, generally 0
 * @param maxTaskValue
 * the maximum progress value for a task, such as the total number of threads or 100 if
 * using percentage-complete as a metric.
 * @param cancelEventListener
 * listener that is responsible for cancelling a long-lived task when the user clicks
 * the cancel button. If a task cannot be cancelled this must be null.
 * @param cancelButtonText
 * text displayed in the cancel button if a task can be cancelled. This is only used if
 * a cancel event listener is provided.
 */
private void startProgressDialog(final String statusMessage, final String detailsText,
    final int minTaskValue, final int maxTaskValue, final String cancelButtonText,
    final CancelEventTrigger cancelEventListener)
{
    // Lazily create the shared dialog instance on first use.
    if (this.progressDialog == null) {
        this.progressDialog = new ProgressDialog(this.ownerFrame, "Please wait...", null);
    }

    // Show the dialog on the EDT without blocking the caller.
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            progressDialog.startDialog(statusMessage, detailsText, minTaskValue, maxTaskValue,
                cancelEventListener, cancelButtonText);
        }
    });
}
/**
 * Updates the status text and value of the progress display dialog.
 * The update is applied asynchronously on the event dispatch thread; the
 * dialog is assumed to have been created by a prior startProgressDialog call.
 *
 * @param statusMessage
 * describes the status of a task text meaningful to the user, such as "3 files of 7 uploaded"
 * @param detailsText
 * describes the status of a task in more detail, such as the current transfer rate and time remaining.
 * @param progressValue
 * value representing how far through the task we are (relative to min and max values)
 */
private void updateProgressDialog(final String statusMessage, final String detailsText, final int progressValue) {
    SwingUtilities.invokeLater(new Runnable() {
        public void run() {
            progressDialog.updateDialog(statusMessage, detailsText, progressValue);
        }
    });
}
/**
 * Stops/halts the progress display dialog and allows the user to interact with the application.
 * Unlike the start/update methods this blocks until the dialog has actually
 * been dismissed on the event dispatch thread.
 */
private void stopProgressDialog() {
    runInDispatcherThreadImmediately(new Runnable() {
        public void run() {
            progressDialog.stopDialog();
        }
    });
}
/**
 * Event handler for this application, handles all menu items.
 *
 * Dispatches on the event's action-command string for menu/button events,
 * plus one source-identity check for the filter-objects checkbox (which has
 * no action command of its own). Unrecognised events are logged and ignored.
 */
public void actionPerformed(ActionEvent event) {
    // Service Menu Events
    if ("LoginEvent".equals(event.getActionCommand())) {
        loginEvent();
    } else if ("LogoutEvent".equals(event.getActionCommand())) {
        logoutEvent();
    } else if ("QuitEvent".equals(event.getActionCommand())) {
        System.exit(0);
    }

    // Bucket Events.
    else if ("ViewBucketProperties".equals(event.getActionCommand())) {
        listBucketProperties();
    } else if ("RefreshBuckets".equals(event.getActionCommand())) {
        listAllBuckets();
    } else if ("CreateBucket".equals(event.getActionCommand())) {
        createBucketAction();
    } else if ("DeleteBucket".equals(event.getActionCommand())) {
        deleteSelectedBucket();
    } else if ("ManageDistributions".equals(event.getActionCommand())) {
        // Collect the names of all listed buckets for the distributions dialog.
        S3Bucket[] buckets = bucketTableModel.getBuckets();
        String[] bucketNames = new String[buckets.length];
        for (int i = 0; i < buckets.length; i++) {
            bucketNames[i] = buckets[i].getName();
        }
        ManageDistributionsDialog.showDialog(ownerFrame, cloudFrontService, bucketNames, this);
    } else if ("AddThirdPartyBucket".equals(event.getActionCommand())) {
        addThirdPartyBucket();
    } else if ("UpdateBucketACL".equals(event.getActionCommand())) {
        updateBucketAccessControlList();
    } else if ("UpdateBucketRequesterPaysStatus".equals(event.getActionCommand())) {
        updateBucketRequesterPaysSetting();
    }

    // Object Events
    else if ("ViewOrModifyObjectAttributes".equals(event.getActionCommand())) {
        displayObjectsAttributesDialog();
    } else if ("CopyObjects".equals(event.getActionCommand())) {
        copyObjects();
    } else if ("RefreshObjects".equals(event.getActionCommand())) {
        listObjects();
    } else if ("UpdateObjectACL".equals(event.getActionCommand())) {
        displayAclModificationDialog();
    } else if ("GeneratePublicGetURLs".equals(event.getActionCommand())) {
        generatePublicGetUrls();
    } else if ("GenerateTorrentURL".equals(event.getActionCommand())) {
        generateTorrentUrl();
    } else if ("DeleteObjects".equals(event.getActionCommand())) {
        deleteSelectedObjects();
    } else if ("DownloadObjects".equals(event.getActionCommand())) {
        downloadSelectedObjects();
    } else if ("UploadFiles".equals(event.getActionCommand())) {
        // Let the user choose files and/or directories to upload, starting
        // from the last directory they uploaded from.
        JFileChooser fileChooser = new JFileChooser();
        fileChooser.setMultiSelectionEnabled(true);
        fileChooser.setDialogTitle("Choose files to upload");
        fileChooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
        fileChooser.setApproveButtonText("Upload files");
        fileChooser.setCurrentDirectory(fileChoosersLastUploadDirectory);

        int returnVal = fileChooser.showOpenDialog(ownerFrame);
        if (returnVal != JFileChooser.APPROVE_OPTION) {
            return;
        }

        final File[] uploadFiles = fileChooser.getSelectedFiles();
        if (uploadFiles.length == 0) {
            return;
        }

        // Save the chosen directory location for next time.
        fileChoosersLastUploadDirectory = uploadFiles[0].getParentFile();

        uploadFiles(uploadFiles);
    } else if (event.getSource().equals(filterObjectsCheckBox)) {
        // Toggle the filter panel; when hiding it, clear the filter inputs
        // (resetting the delimiter combo fires a "RefreshObjects" event).
        if (filterObjectsCheckBox.isSelected()) {
            filterObjectsPanel.setVisible(true);
        } else {
            filterObjectsPanel.setVisible(false);
            filterObjectsPrefix.setText("");
            if (filterObjectsDelimiter.getSelectedIndex() != 0) {
                filterObjectsDelimiter.setSelectedIndex(0);
            }
        }
    }

    // Tools events
    else if ("BucketLogging".equals(event.getActionCommand())) {
        S3Bucket[] buckets = bucketTableModel.getBuckets();
        BucketLoggingDialog.showDialog(ownerFrame, s3ServiceMulti.getS3Service(), buckets, this);
    }

    // Preference Events
    else if ("PreferencesDialog".equals(event.getActionCommand())) {
        PreferencesDialog.showDialog(cockpitPreferences, ownerFrame, this);

        // Save a user's preferences if requested, otherwise wipe any existing preferences file.
        File cockpitPreferencesPropertiesFile = new File(cockpitHomeDirectory, Constants.COCKPIT_PROPERTIES_FILENAME);
        if (cockpitPreferences.isRememberPreferences()) {
            try {
                Properties properties = cockpitPreferences.toProperties();
                if (!cockpitHomeDirectory.exists()) {
                    cockpitHomeDirectory.mkdir();
                }
                properties.list(new PrintStream(
                    new FileOutputStream(cockpitPreferencesPropertiesFile)));
            } catch (IOException e) {
                String message = "Unable to save your preferences";
                log.error(message, e);
                ErrorDialog.showDialog(ownerFrame, this, message, e);
            }
        } else if (cockpitPreferencesPropertiesFile.exists()) {
            // User elected not to store preferences, delete the existing preferences file.
            cockpitPreferencesPropertiesFile.delete();
        }

        // (Re)build the encryption utility from the chosen password, or
        // disable encryption if no password is set.
        if (cockpitPreferences.isEncryptionPasswordSet()) {
            try {
                encryptionUtil = new EncryptionUtil(
                    cockpitPreferences.getEncryptionPassword(),
                    cockpitPreferences.getEncryptionAlgorithm(),
                    EncryptionUtil.DEFAULT_VERSION);
            } catch (Exception e) {
                String message = "Unable to start encryption utility";
                log.error(message, e);
                ErrorDialog.showDialog(ownerFrame, this, message, e);
            }
        } else {
            encryptionUtil = null;
        }
    }

    // Ooops...
    else {
        log.warn("Unrecognised ActionEvent command '" + event.getActionCommand() + "' in " + event);
    }
}
/**
 * Handles list selection events for this application, routing bucket-table
 * selections and object-table selections to their respective handlers.
 * Intermediate events fired while a selection is still changing are ignored.
 */
public void valueChanged(ListSelectionEvent e) {
    if (e.getValueIsAdjusting()) {
        // Wait for the selection to settle before reacting.
        return;
    }

    Object source = e.getSource();
    if (bucketsTable.getSelectionModel().equals(source)) {
        bucketSelectedAction();
    } else if (objectsTable.getSelectionModel().equals(source)) {
        objectSelectedAction();
    }
}
/**
 * Displays the {@link StartupDialog} dialog and, if the user provides login credentials,
 * logs into the S3 service using those credentials.
 *
 * Also creates a CloudFront service and probes it with a distribution
 * listing; if the account is not subscribed to CloudFront (or the probe
 * fails for any other reason) the CloudFront service is discarded and the
 * corresponding menu item stays disabled.
 *
 * This method should always be run within the event dispatcher thread.
 */
private void loginEvent() {
    try {
        StartupDialog startupDialog = new StartupDialog(ownerFrame, cockpitProperties, this);
        startupDialog.setVisible(true);

        AWSCredentials awsCredentials = startupDialog.getAWSCredentials();
        startupDialog.dispose();

        // User cancelled the login dialog.
        if (awsCredentials == null) {
            return;
        }

        s3ServiceMulti = new S3ServiceMulti(
            new RestS3Service(awsCredentials, APPLICATION_DESCRIPTION, this), this);

        cloudFrontService = new CloudFrontService(
            awsCredentials, APPLICATION_DESCRIPTION, this, null, null);
        try {
            // Check that the user is actually signed-up for CloudFront.
            cloudFrontService.listDistributions();
        } catch (CloudFrontServiceException e) {
            if ("OptInRequired".equals(e.getErrorCode())) {
                log.warn("Your AWS account is not subscribed to the Amazon CloudFront service, "
                    + "you will not be able to manage distributions");
            }
            // Any failure here means CloudFront is unavailable for this session.
            cloudFrontService = null;
        }

        listAllBuckets();
        objectsSummaryLabel.setText(" ");

        // Enable the logged-in menu items.
        loginMenuItem.setEnabled(false);
        logoutMenuItem.setEnabled(true);

        refreshBucketMenuItem.setEnabled(true);
        createBucketMenuItem.setEnabled(true);
        bucketLoggingMenuItem.setEnabled(true);
        manageDistributionsMenuItem.setEnabled(cloudFrontService != null);
    } catch (Exception e) {
        String message = "Unable to log in to S3";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);

        // Revert to the logged-out state.
        logoutEvent();
    }
}
/**
 * Logs out of the S3 service by clearing all listed objects and buckets and resetting
 * the s3ServiceMulti member variable.
 *
 * This method should always be invoked within the event dispatching thread.
 */
private void logoutEvent() {
    log.debug("Logging out");
    try {
        // Revert to anonymous service.
        s3ServiceMulti = new S3ServiceMulti(
            new RestS3Service(null, APPLICATION_DESCRIPTION, this), this);

        cloudFrontService = null;

        // Clear all displayed bucket and object data.
        bucketsTable.clearSelection();
        bucketTableModel.removeAllBuckets();
        objectTableModel.removeAllObjects();

        objectsSummaryLabel.setText(" ");

        ownerFrame.setTitle(APPLICATION_TITLE);
        // Restore the logged-out menu state.
        loginMenuItem.setEnabled(true);
        logoutMenuItem.setEnabled(false);

        refreshBucketMenuItem.setEnabled(false);
        createBucketMenuItem.setEnabled(false);
        bucketLoggingMenuItem.setEnabled(false);
        manageDistributionsMenuItem.setEnabled(false);
    } catch (Exception e) {
        String message = "Unable to log out from S3";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
 * Displays the currently selected bucket's properties in the dialog {@link ItemPropertiesDialog}.
 *
 * If the bucket's ACL or location are not yet known they are fetched on a
 * background thread (behind a progress dialog) before the dialog is shown;
 * otherwise the dialog is shown immediately on the calling thread.
 */
private void listBucketProperties() {
    final S3Bucket selectedBucket = getCurrentSelectedBucket();

    if (selectedBucket.getAcl() == null || !selectedBucket.isLocationKnown()) {
        // Retrieve all a bucket's details before displaying the summary.
        runInBackgroundThread(new Runnable() {
            public void run() {
                startProgressDialog("Retrieving details for bucket " + selectedBucket.getName());
                try {
                    try {
                        // Fill in only the details that are still missing.
                        if (selectedBucket.getAcl() == null) {
                            selectedBucket.setAcl(
                                s3ServiceMulti.getS3Service().getBucketAcl(
                                    selectedBucket));
                        }
                        if (!selectedBucket.isLocationKnown()) {
                            selectedBucket.setLocation(
                                s3ServiceMulti.getS3Service().getBucketLocation(
                                    selectedBucket.getName()));
                        }
                        if (!selectedBucket.isRequesterPaysKnown()) {
                            selectedBucket.setRequesterPays(
                                s3ServiceMulti.getS3Service().isRequesterPaysBucket(
                                    selectedBucket.getName()));
                        }
                    } catch (S3ServiceException e) {
                        // Retrieving details for a third-party bucket will
                        // often fail when ACL or Location is retrieved,
                        // ignore these failures.
                    }
                    stopProgressDialog();

                    // Show the dialog on the EDT once details are loaded.
                    runInDispatcherThreadImmediately(new Runnable() {
                        public void run() {
                            ItemPropertiesDialog.showDialog(ownerFrame, selectedBucket, null);
                        }
                    });
                } catch (final Exception e) {
                    stopProgressDialog();

                    String message = "Unable to retrieve details for bucket";
                    log.error(message, e);
                    ErrorDialog.showDialog(ownerFrame, null, message, e);
                }
            };
        });
    } else {
        // All details already known; show the dialog immediately.
        ItemPropertiesDialog.showDialog(ownerFrame, selectedBucket, null);
    }
}
/**
 * Displays the currently selected object's properties in the dialog {@link ObjectsAttributesDialog}.
 * <p>
 * As detailed information about the object may not yet be available, this method works
 * indirectly via the {@link #retrieveObjectsDetails} method. The <code>retrieveObjectsDetails</code>
 * method retrieves all the details for the currently selected objects, and once they are available
 * knows to display the dialog as the {@link #isViewingOrModifyingObjectProperties} flag is set.
 * <p>
 * If the user approves modifications in the dialog, the objects are copied
 * in-place (REPLACE semantics) to apply the new metadata, retaining each
 * object's original ACL, then their headers are re-fetched to refresh the
 * displayed details.
 */
private void displayObjectsAttributesDialog() {
    runInBackgroundThread(new Runnable() {
        public void run() {
            // Ensure full object details are available; abort on failure.
            if (!retrieveObjectsDetails(getSelectedObjects())) {
                return;
            }

            // Lazily create the shared dialog instance on first use.
            if (objectsAttributesDialog == null) {
                objectsAttributesDialog = new ObjectsAttributesDialog(
                    ownerFrame, "Object Attributes", skinsFactory);
            }

            final S3Object[] sourceObjects = getSelectedObjects();

            // Display the (modal) dialog on the EDT and wait for it to close.
            boolean ok = runInDispatcherThreadImmediately(new Runnable() {
                public void run() {
                    objectsAttributesDialog.displayDialog(sourceObjects, true);
                }
            });
            if (!ok) {
                return;
            }

            final String[] sourceObjectKeys = objectsAttributesDialog.getSourceObjectKeys();
            final S3Object[] destinationObjects = objectsAttributesDialog.getDestinationObjects();

            if (!objectsAttributesDialog.isModifyActionApproved()) {
                // Do nothing.
                return;
            }

            // Retain ACL settings from original objects.
            if (!s3ServiceMulti.getObjectACLs(getCurrentSelectedBucket(), sourceObjects)) {
                return;
            }
            for (int i = 0; i < sourceObjects.length; i++) {
                destinationObjects[i].setAcl(
                    sourceObjects[i].getAcl());
            }

            // Copy objects in-place, to REPLACE their metadata attributes.
            ok = s3ServiceMulti.copyObjects(
                getCurrentSelectedBucket().getName(), getCurrentSelectedBucket().getName(),
                sourceObjectKeys, destinationObjects, true);

            // Refresh details for modified objects
            if (ok) {
                s3ServiceMulti.getObjectsHeads(
                    getCurrentSelectedBucket(), destinationObjects);
            }
        }
    });
}
    /**
     * Lists the buckets in the user's S3 account and refreshes the GUI to display
     * these buckets. Any buckets or objects already listed in the GUI are cleared first.
     * If a CloudFront service is available, each bucket is also checked for an
     * associated CloudFront distribution.
     */
    private void listAllBuckets() {
        // Remove current bucket and object data from models.
        cachedBuckets.clear();
        bucketsTable.clearSelection();
        bucketTableModel.removeAllBuckets();
        objectTableModel.removeAllObjects();
        // This is all very convoluted. This was necessary so we can display the status dialog box.
        runInBackgroundThread(new Runnable() {
            public void run() {
                startProgressDialog("Listing buckets for " + s3ServiceMulti.getAWSCredentials().getAccessKey());
                try {
                    final S3Bucket[] buckets = s3ServiceMulti.getS3Service().listAllBuckets();
                    // Lookup user's CloudFront distributions.
                    Distribution[] distributions = new Distribution[] {};
                    if (cloudFrontService != null) {
                        updateProgressDialog("Listing distributions for " + cloudFrontService.getAWSCredentials().getAccessKey(), "", 0);
                        distributions = cloudFrontService.listDistributions();
                    }
                    final Distribution[] finalDistributions = distributions;
                    // GUI model updates must happen on the event dispatch thread.
                    runInDispatcherThreadImmediately(new Runnable() {
                        public void run() {
                            for (int i = 0; i < buckets.length; i++) {
                                // Determine whether each bucket has one or more CloudFront distributions.
                                boolean bucketHasDistribution = false;
                                for (int j = 0; j < finalDistributions.length; j++) {
                                    if (finalDistributions[j].getOrigin().equals(buckets[i].getName() + ".s3.amazonaws.com")) {
                                        bucketHasDistribution = true;
                                    }
                                }
                                bucketTableModel.addBucket(buckets[i], bucketHasDistribution);
                                // Use the first bucket's owner to set the window title.
                                if (i == 0) {
                                    ownerFrame.setTitle(APPLICATION_TITLE + " : " +
                                        buckets[i].getOwner().getDisplayName());
                                }
                            }
                        }
                    });
                } catch (final Exception e) {
                    stopProgressDialog();
                    // A listing failure is treated as a fatal login problem:
                    // log out and re-prompt for credentials.
                    SwingUtilities.invokeLater(new Runnable() {
                        public void run() {
                            logoutEvent();
                            String message = "Unable to list your buckets in S3, please log in again";
                            log.error(message, e);
                            ErrorDialog.showDialog(ownerFrame, null, message, e);
                            loginEvent();
                        }
                    });
                } finally {
                    stopProgressDialog();
                }
            };
        });
    }
    /**
     * This method is an {@link S3ServiceEventListener} action method that is invoked when this
     * application's <code>S3ServiceMulti</code> triggers a <code>GetObjectsEvent</code>.
     * <p>
     * This never happens in this application as downloads are performed by
     * {@link S3ServiceMulti#downloadObjects(S3Bucket, DownloadPackage[])} instead.
     *
     * @param event
     */
    public void s3ServiceEventPerformed(GetObjectsEvent event) {
        // Not used: downloads go through downloadObjects(), so this event never fires.
    }
    /**
     * This method is an {@link S3ServiceEventListener} action method that is invoked when this
     * application's <code>S3ServiceMulti</code> triggers a <code>ListObjectsEvent</code>.
     * <p>
     * This never happens in this application as it does not perform multi-threaded object
     * listings.
     *
     * @param event
     */
    public void s3ServiceEventPerformed(ListObjectsEvent event) {
        // Not used: object listings are performed synchronously in listObjects().
    }
/**
* Actions performed when a bucket is selected in the bucket list table.
*/
private void bucketSelectedAction() {
S3Bucket newlySelectedBucket = getCurrentSelectedBucket();
if (newlySelectedBucket == null) {
viewBucketPropertiesMenuItem.setEnabled(false);
refreshBucketMenuItem.setEnabled(true);
updateBucketACLMenuItem.setEnabled(false);
updateBucketRequesterPaysStatusMenuItem.setEnabled(false);
deleteBucketMenuItem.setEnabled(false);
refreshObjectMenuItem.setEnabled(false);
uploadFilesMenuItem.setEnabled(false);
objectTableModel.removeAllObjects();
objectsTable.getDropTarget().setActive(false);
objectsTableSP.getDropTarget().setActive(false);
return;
}
viewBucketPropertiesMenuItem.setEnabled(true);
refreshBucketMenuItem.setEnabled(true);
updateBucketACLMenuItem.setEnabled(true);
updateBucketRequesterPaysStatusMenuItem.setEnabled(true);
deleteBucketMenuItem.setEnabled(true);
refreshObjectMenuItem.setEnabled(true);
uploadFilesMenuItem.setEnabled(true);
objectsTable.getDropTarget().setActive(true);
objectsTableSP.getDropTarget().setActive(true);
if (cachedBuckets.containsKey(newlySelectedBucket.getName())) {
S3Object[] objects = (S3Object[]) cachedBuckets.get(newlySelectedBucket.getName());
objectTableModel.removeAllObjects();
objectTableModel.addObjects(objects);
updateObjectsSummary(false);
} else {
listObjects();
}
}
/**
* Actions performed when an object is selected in the objects list table.
*/
private void objectSelectedAction() {
int count = getSelectedObjects().length;
updateObjectACLMenuItem.setEnabled(count > 0);
downloadObjectMenuItem.setEnabled(count > 0);
deleteObjectMenuItem.setEnabled(count > 0);
viewOrModifyObjectAttributesMenuItem.setEnabled(count > 0);
copyObjectsMenuItem.setEnabled(count > 0);
generatePublicGetUrls.setEnabled(count >= 1);
generateTorrentUrl.setEnabled(count == 1);
}
    /**
     * Starts a thread to run {@link S3ServiceMulti#listObjects}.
     * <p>
     * Objects are fetched in chunks and appended to the table as each chunk
     * arrives; the full listing is cached per bucket when complete. The
     * progress dialog offers a cancel button that stops the chunk loop.
     */
    private void listObjects() {
        if (getCurrentSelectedBucket() == null) {
            // Oops, better do nothing.
            return;
        }
        objectTableModel.removeAllObjects();
        objectsSummaryLabel.setText(" ");
        // Shared flag the cancel listener flips; checked after each chunk.
        final boolean listingCancelled[] = new boolean[1]; // Default to false.
        final CancelEventTrigger cancelListener = new CancelEventTrigger() {
            private static final long serialVersionUID = 6939193243303189876L;
            public void cancelTask(Object eventSource) {
                listingCancelled[0] = true;
            }
        };
        // This is all very convoluted, it was done this way to ensure we can display the dialog box.
        runInBackgroundThread(new Runnable() {
            public void run() {
                try {
                    startProgressDialog(
                        "Listing objects in " + getCurrentSelectedBucket().getName(),
                        "", 0, 0, "Cancel bucket listing", cancelListener);
                    final String prefix = filterObjectsPrefix.getText();
                    final String delimiter = (String) filterObjectsDelimiter.getSelectedItem();
                    final ArrayList allObjects = new ArrayList();
                    String priorLastKey = null;
                    do {
                        // Fetch the next chunk, resuming from the prior last key.
                        S3ObjectsChunk chunk = s3ServiceMulti.getS3Service().listObjectsChunked(
                            getCurrentSelectedBucket().getName(), prefix, delimiter,
                            BUCKET_LIST_CHUNKING_SIZE, priorLastKey);
                        final S3Object[] objects = chunk.getObjects();
                        for (int i = 0; i < objects.length; i++) {
                            objects[i].setOwner(getCurrentSelectedBucket().getOwner());
                        }
                        priorLastKey = chunk.getPriorLastKey();
                        allObjects.addAll(Arrays.asList(objects));
                        updateProgressDialog(
                            "Listed " + allObjects.size() + " objects in "
                            + getCurrentSelectedBucket().getName(), "", 0);
                        // Append this chunk to the table on the dispatch thread.
                        runInDispatcherThreadImmediately(new Runnable() {
                            public void run() {
                                objectTableModel.addObjects(objects);
                                updateObjectsSummary(true);
                            }
                        });
                    } while (!listingCancelled[0] && priorLastKey != null);
                    // Listing finished (or cancelled): finalize summary and cache result.
                    runInDispatcherThreadImmediately(new Runnable() {
                        public void run() {
                            updateObjectsSummary(listingCancelled[0]);
                            S3Object[] allObjects = objectTableModel.getObjects();
                            cachedBuckets.put(getCurrentSelectedBucket().getName(), allObjects);
                        }
                    });
                } catch (final Exception e) {
                    stopProgressDialog();
                    String message = "Unable to list objects";
                    log.error(message, e);
                    ErrorDialog.showDialog(ownerFrame, null, message, e);
                } finally {
                    stopProgressDialog();
                }
            };
        });
    }
/**
* Updates the summary text shown below the listing of objects, which details the
* number and total size of the objects.
*
*/
private void updateObjectsSummary(boolean isIncompleteListing) {
S3Object[] objects = objectTableModel.getObjects();
try {
String summary = "Please select a bucket";
long totalBytes = 0;
if (objects != null) {
summary = "<html>" + objects.length + " item" + (objects.length != 1? "s" : "");
for (int i = 0; i < objects.length; i++) {
totalBytes += objects[i].getContentLength();
}
if (totalBytes > 0) {
summary += ", " + byteFormatter.formatByteSize(totalBytes);
}
summary += " @ " + timeSDF.format(new Date());
if (isObjectFilteringActive()) {
summary += " - <font color=\"blue\">Filtered</font>";
}
if (isIncompleteListing) {
summary += " - <font color=\"red\">Incomplete</font>";
}
summary += "</html>";
}
objectsSummaryLabel.setText(summary);
} catch (Throwable t) {
String message = "Unable to update object list summary";
log.error(message, t);
ErrorDialog.showDialog(ownerFrame, this, message, t);
}
}
/**
* Displays bucket-specific actions in a popup menu.
* @param invoker the component near which the popup menu will be displayed
* @param xPos the mouse's horizontal co-ordinate when the popup menu was invoked
* @param yPos the mouse's vertical co-ordinate when the popup menu was invoked
*/
private void showBucketPopupMenu(JComponent invoker, int xPos, int yPos) {
if (s3ServiceMulti == null) {
return;
}
bucketActionMenu.show(invoker, xPos, yPos);
}
/**
* @return the bucket currently selected in the gui, null if no bucket is selected.
*/
private S3Bucket getCurrentSelectedBucket() {
if (bucketsTable.getSelectedRows().length == 0) {
return null;
} else {
return bucketTableModel.getBucket(
bucketTableModelSorter.modelIndex(
bucketsTable.getSelectedRows()[0]));
}
}
/**
* Displays object-specific actions in a popup menu.
* @param invoker the component near which the popup menu will be displayed
* @param xPos the mouse's horizontal co-ordinate when the popup menu was invoked
* @param yPos the mouse's vertical co-ordinate when the popup menu was invoked
*/
private void showObjectPopupMenu(JComponent invoker, int xPos, int yPos) {
if (getCurrentSelectedBucket() == null || getSelectedObjects().length == 0) {
return;
}
objectActionMenu.show(invoker, xPos, yPos);
}
/**
* Action to create a new bucket in S3 after prompting the user for a bucket name.
*
*/
private void createBucketAction() {
String proposedNewName =
s3ServiceMulti.getAWSCredentials().getAccessKey().toLowerCase()
+ "." + "bucket-name";
CreateBucketDialog dialog = new CreateBucketDialog(proposedNewName, ownerFrame, this);
dialog.setVisible(true);
if (!dialog.getOkClicked()) {
return;
}
final S3Bucket newBucket = new S3Bucket(dialog.getBucketName(), dialog.getBucketLocation());
dialog.dispose();
runInBackgroundThread(new Runnable() {
public void run() {
if (s3ServiceMulti.createBuckets(new S3Bucket[] { newBucket })) {
int modelIndex = bucketTableModel.getBucketIndexByName(newBucket.getName());
int viewIndex = bucketTableModelSorter.viewIndex(modelIndex);
bucketsTable.setRowSelectionInterval(viewIndex, viewIndex);
}
}
});
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>CreateBucketsEvent</code>.
* <p>
* When a bucket is successfully created it is added to the listing of buckets.
*
* @param event
*/
public void s3ServiceEventPerformed(final CreateBucketsEvent event) {
if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
startProgressDialog(
"Creating " + event.getThreadWatcher().getThreadCount() + " buckets",
"", 0, (int) event.getThreadWatcher().getThreadCount(),
"Cancel bucket creation", event.getThreadWatcher().getCancelEventListener());
}
else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
runInDispatcherThreadImmediately(new Runnable() {
public void run() {
for (int i = 0; i < event.getCreatedBuckets().length; i++) {
bucketTableModel.addBucket(event.getCreatedBuckets()[i], false);
}
}
});
ThreadWatcher progressStatus = event.getThreadWatcher();
String statusText = "Created " + progressStatus.getCompletedThreads() + " buckets of " + progressStatus.getThreadCount();
updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
}
else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
stopProgressDialog();
}
else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
stopProgressDialog();
}
else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
stopProgressDialog();
String message = "Unable to create a bucket";
log.error(message, event.getErrorCause());
ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
}
}
/**
* Deletes the bucket currently selected in the gui.
*
*/
private void deleteSelectedBucket() {
S3Bucket currentBucket = getCurrentSelectedBucket();
if (currentBucket == null) {
log.warn("Ignoring delete bucket command, no currently selected bucket");
return;
}
int response = JOptionPane.showConfirmDialog(ownerFrame,
"Are you sure you want to delete '" + currentBucket.getName() + "'?",
"Delete Bucket?", JOptionPane.YES_NO_OPTION);
if (response == JOptionPane.NO_OPTION) {
return;
}
try {
s3ServiceMulti.getS3Service().deleteBucket(currentBucket.getName());
bucketTableModel.removeBucket(currentBucket);
} catch (Exception e) {
String message = "Unable to delete bucket";
log.error(message, e);
ErrorDialog.showDialog(ownerFrame, this, message, e);
}
}
/**
* Adds a bucket not owned by the current S3 user to the bucket listing, after
* prompting the user for the name of the bucket to add.
* To be added in this way, the third-party bucket must be publicly available.
*
*/
private void addThirdPartyBucket() {
try {
String bucketName = (String) JOptionPane.showInputDialog(ownerFrame,
"Name for third-party bucket:",
"Add a third-party bucket", JOptionPane.QUESTION_MESSAGE);
if (bucketName != null) {
if (s3ServiceMulti.getS3Service().isBucketAccessible(bucketName)) {
S3Bucket thirdPartyBucket = new S3Bucket(bucketName);
bucketTableModel.addBucket(thirdPartyBucket, false);
} else {
String message = "Unable to access third-party bucket: " + bucketName;
log.error(message);
ErrorDialog.showDialog(ownerFrame, this, message, null);
}
}
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
String message = "Unable to access third-party bucket";
log.error(message, e);
ErrorDialog.showDialog(ownerFrame, this, message, e);
}
}
/**
* Updates the ACL settings for the currently selected bucket.
*/
private void updateBucketAccessControlList() {
try {
S3Bucket currentBucket = getCurrentSelectedBucket();
AccessControlList bucketACL = s3ServiceMulti.getS3Service().getBucketAcl(currentBucket);
AccessControlList updatedBucketACL = AccessControlDialog.showDialog(
ownerFrame, new S3Bucket[] {currentBucket}, bucketACL, this);
if (updatedBucketACL != null) {
currentBucket.setAcl(updatedBucketACL);
s3ServiceMulti.getS3Service().putBucketAcl(currentBucket);
}
} catch (Exception e) {
String message = "Unable to update bucket's Access Control List";
log.error(message, e);
ErrorDialog.showDialog(ownerFrame, this, message, e);
}
}
/**
* Updates the ACL settings for the currently selected bucket.
*/
private void updateBucketRequesterPaysSetting() {
try {
final S3Bucket selectedBucket = getCurrentSelectedBucket();
if (!selectedBucket.isRequesterPaysKnown()) {
selectedBucket.setRequesterPays(
s3ServiceMulti.getS3Service().isRequesterPaysBucket(
selectedBucket.getName()));
}
boolean originalRequesterPaysFlag = selectedBucket.isRequesterPays();
RequesterPaysDialog dialog = new RequesterPaysDialog(selectedBucket, ownerFrame, this);
dialog.setVisible(true);
if (!dialog.getOkClicked()) {
return;
}
final boolean newRequesterPaysFlag = dialog.isRequesterPaysSelected();
dialog.dispose();
if (newRequesterPaysFlag != originalRequesterPaysFlag) {
runInBackgroundThread(new Runnable() {
public void run() {
try {
s3ServiceMulti.getS3Service().setRequesterPaysBucket(
selectedBucket.getName(), newRequesterPaysFlag);
selectedBucket.setRequesterPays(newRequesterPaysFlag);
} catch (final Exception e) {
String message = "Unable to update Requester Pays status";
log.error(message, e);
ErrorDialog.showDialog(ownerFrame, null, message, e);
}
}
});
}
} catch (Exception e) {
String message = "Unable to update bucket's Access Control List";
log.error(message, e);
ErrorDialog.showDialog(ownerFrame, this, message, e);
}
}
/**
* @return the set of objects currently selected in the gui, or an empty array if none are selected.
*/
private S3Object[] getSelectedObjects() {
int viewRows[] = objectsTable.getSelectedRows();
if (viewRows.length == 0) {
return new S3Object[] {};
} else {
S3Object objects[] = new S3Object[viewRows.length];
for (int i = 0; i < viewRows.length; i++) {
int modelRow = objectTableModelSorter.modelIndex(viewRows[i]);
objects[i] = objectTableModel.getObject(modelRow);
}
return objects;
}
}
    /**
     * Prompts the user to modify the Access Control List settings of all the
     * currently selected objects, and applies the updated ACL to each object.
     * Lookups and updates run in a background thread; the ACL dialog itself is
     * shown on the event dispatch thread.
     */
    private void displayAclModificationDialog() {
        final HyperlinkActivatedListener hyperlinkListener = this;
        runInBackgroundThread(new Runnable() {
            public void run() {
                final S3Object[] selectedObjects = getSelectedObjects();
                // Fetch the current ACL of every selected object first.
                boolean aclLookupSucceeded = s3ServiceMulti.getObjectACLs(
                    getCurrentSelectedBucket(), selectedObjects);
                if (!aclLookupSucceeded) {
                    return;
                }
                // Single-element array lets the dispatcher-thread Runnable pass
                // the dialog's result back to this background thread.
                final AccessControlList[] updatedObjectACL = new AccessControlList[] {null};
                runInDispatcherThreadImmediately(new Runnable() {
                    public void run() {
                        // Build merged ACL containing ALL relevant permissions
                        AccessControlList mergedACL = new AccessControlList();
                        for (int i = 0; i < selectedObjects.length; i++) {
                            AccessControlList objectACL = selectedObjects[i].getAcl();
                            mergedACL.grantAllPermissions(objectACL.getGrants());
                            // BEWARE! Here we assume that all the objects have the same owner...
                            if (mergedACL.getOwner() == null) {
                                mergedACL.setOwner(objectACL.getOwner());
                            }
                        }
                        // Show ACL dialog box for user to change ACL settings for all objects.
                        updatedObjectACL[0] = AccessControlDialog.showDialog(
                            ownerFrame, selectedObjects, mergedACL, hyperlinkListener);
                    }
                });
                // A null result means the user cancelled the dialog.
                if (updatedObjectACL[0] != null) {
                    // Update ACLs for each object.
                    for (int i = 0; i < selectedObjects.length; i++) {
                        selectedObjects[i].setAcl(updatedObjectACL[0]);
                    }
                    // Perform ACL updates.
                    s3ServiceMulti.putACLs(getCurrentSelectedBucket(), selectedObjects);
                }
            }
        });
    }
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>LookupACLEvent</code>.
* <p>
* The ACL details are retrieved for the currently selected objects in the gui, then the
* {@link AccessControlDialog} is displayed to allow the user to update the ACL settings
* for these objects.
*
* @param event
*/
public void s3ServiceEventPerformed(LookupACLEvent event) {
if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
startProgressDialog(
"Retrieved 0 of " + event.getThreadWatcher().getThreadCount() + " ACLs",
"", 0, (int) event.getThreadWatcher().getThreadCount(), "Cancel Lookup",
event.getThreadWatcher().getCancelEventListener());
}
else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
ThreadWatcher progressStatus = event.getThreadWatcher();
String statusText = "Retrieved " + progressStatus.getCompletedThreads() + " of " + progressStatus.getThreadCount() + " ACLs";
updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
}
else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
stopProgressDialog();
}
else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
stopProgressDialog();
}
else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
stopProgressDialog();
String message = "Unable to lookup Access Control list for objects";
log.error(message, event.getErrorCause());
ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
}
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>UpdateACLEvent</code>.
* <p>
* This method merely updates the progress dialog as ACLs are updated.
*
* @param event
*/
public void s3ServiceEventPerformed(UpdateACLEvent event) {
if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
startProgressDialog(
"Updated 0 of " + event.getThreadWatcher().getThreadCount() + " ACLs",
"", 0, (int) event.getThreadWatcher().getThreadCount(), "Cancel Update",
event.getThreadWatcher().getCancelEventListener());
}
else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
ThreadWatcher progressStatus = event.getThreadWatcher();
String statusText = "Updated " + progressStatus.getCompletedThreads() + " of " + progressStatus.getThreadCount() + " ACLs";
updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
}
else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
stopProgressDialog();
}
else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
stopProgressDialog();
}
else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
stopProgressDialog();
String message = "Unable to update Access Control Lists";
log.error(message, event.getErrorCause());
ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
}
}
    /**
     * Downloads the objects currently selected in the objects table.
     * <p>
     * The user is first prompted for a directory to store the downloaded files in.
     * Pre-existing local files that clash with the download candidates are detected,
     * then the comparison, user confirmation and transfer are performed in a
     * background thread.
     */
    private void downloadSelectedObjects() {
        // Prompt user to choose directory location for downloaded files (or cancel download altogether)
        JFileChooser fileChooser = new JFileChooser();
        fileChooser.setDialogTitle("Choose directory to save S3 files in");
        fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        fileChooser.setMultiSelectionEnabled(false);
        fileChooser.setSelectedFile(downloadDirectory);
        int returnVal = fileChooser.showDialog(ownerFrame, "Choose Directory");
        if (returnVal != JFileChooser.APPROVE_OPTION) {
            return;
        }
        downloadDirectory = fileChooser.getSelectedFile();
        // Find clashing files
        final Map filesAlreadyInDownloadDirectoryMap = new HashMap();
        S3Object[] objectsForDownload = getSelectedObjects();
        for (int i = 0; i < objectsForDownload.length; i++) {
            File file = new File(downloadDirectory,
                objectsForDownload[i].getKey());
            if (file.exists()) {
                filesAlreadyInDownloadDirectoryMap.put(
                    objectsForDownload[i].getKey(), file);
            }
        }
        // Build map of S3 Objects being downloaded.
        final Map s3DownloadObjectsMap = FileComparer.getInstance()
            .populateS3ObjectMap("", objectsForDownload);
        final HyperlinkActivatedListener hyperlinkListener = this;
        runInBackgroundThread(new Runnable() {
            public void run() {
                // Retrieve details of objects for download
                if (!retrieveObjectsDetails(getSelectedObjects())) {
                    return;
                }
                try {
                    // Compare downloads against existing local files, then let the
                    // user decide which files to fetch/overwrite.
                    final FileComparerResults comparisonResults = compareRemoteAndLocalFiles(
                        filesAlreadyInDownloadDirectoryMap,
                        s3DownloadObjectsMap);
                    DownloadPackage[] downloadPackages =
                        buildDownloadPackageList(comparisonResults, s3DownloadObjectsMap);
                    // A null package list means nothing to download or user cancelled.
                    if (downloadPackages == null) {
                        return;
                    }
                    s3ServiceMulti.downloadObjects(getCurrentSelectedBucket(),
                        downloadPackages);
                } catch (final Exception e) {
                    runInDispatcherThreadImmediately(new Runnable() {
                        public void run() {
                            String message = "Unable to download objects";
                            log.error(message, e);
                            ErrorDialog.showDialog(ownerFrame,
                                hyperlinkListener, message, e);
                        }
                    });
                }
            }
        });
    }
    /**
     * Uploads the given files to the currently selected bucket.
     * <p>
     * Files whose keys clash with objects already listed in the bucket are
     * detected first; the comparison, user confirmation and upload are then
     * performed in a background thread. Fails fast if upload encryption is
     * enabled but no encryption password has been set.
     *
     * @param uploadFiles the local files (or directories) to upload
     */
    private void uploadFiles(File[] uploadFiles) {
        // Fail if encryption is turned on but no password is available.
        if (cockpitPreferences.isUploadEncryptionActive()
            && !cockpitPreferences.isEncryptionPasswordSet())
        {
            ErrorDialog.showDialog(ownerFrame, this,
                "Upload encryption is enabled but you have not yet set a password in the Encryption Preferences.",
                null);
            return;
        }
        try {
            // Build map of files proposed for upload.
            final Map filesForUploadMap = FileComparer.getInstance()
                .buildFileMap(uploadFiles, false);
            // Build map of objects already existing in target S3 bucket with keys
            // matching the proposed upload keys.
            List objectsWithExistingKeys = new ArrayList();
            S3Object[] existingObjects = objectTableModel.getObjects();
            for (int i = 0; i < existingObjects.length; i++) {
                if (filesForUploadMap.keySet().contains(existingObjects[i].getKey()))
                {
                    objectsWithExistingKeys.add(existingObjects[i]);
                }
            }
            existingObjects = (S3Object[]) objectsWithExistingKeys
                .toArray(new S3Object[objectsWithExistingKeys.size()]);
            final Map s3ExistingObjectsMap = FileComparer.getInstance()
                .populateS3ObjectMap("", existingObjects);
            final HyperlinkActivatedListener hyperlinkListener = this;
            final S3Object[] clashingObjects = existingObjects;
            runInBackgroundThread(new Runnable() {
                public void run() {
                    if (clashingObjects.length > 0) {
                        // Retrieve details of potential clashes
                        if (!retrieveObjectsDetails(clashingObjects)) {
                            return;
                        }
                    }
                    try {
                        // Compare the upload candidates against the clashing objects,
                        // then let the user decide which files to upload/overwrite.
                        FileComparerResults comparisonResults =
                            compareRemoteAndLocalFiles(filesForUploadMap, s3ExistingObjectsMap);
                        S3Object[] uploadObjects = buildUploadObjectsList(
                            comparisonResults, filesForUploadMap);
                        if (uploadObjects == null) {
                            return;
                        }
                        // Upload the files.
                        s3ServiceMulti.putObjects(getCurrentSelectedBucket(), uploadObjects);
                    } catch (final Exception e) {
                        runInDispatcherThreadImmediately(new Runnable() {
                            public void run() {
                                String message = "Unable to upload objects";
                                log.error(message, e);
                                ErrorDialog.showDialog(ownerFrame, hyperlinkListener, message, e);
                            }
                        });
                    }
                }
            });
        } catch (Exception e) {
            String message = "Unable to upload objects";
            log.error(message, e);
            ErrorDialog.showDialog(ownerFrame, this, message, e);
        }
    }
private FileComparerResults compareRemoteAndLocalFiles(final Map localFilesMap, final Map s3ObjectsMap)
throws Exception
{
try {
// Compare objects being downloaded and existing local files.
final String statusText =
"Comparing " + s3ObjectsMap.size() + " object" + (s3ObjectsMap.size() > 1 ? "s" : "") +
" in S3 with " + localFilesMap.size() + " local file" + (localFilesMap.size() > 1 ? "s" : "");
startProgressDialog(statusText, "", 0, 100, null, null);
// Calculate total files size.
File[] files = (File[]) localFilesMap.values().toArray(new File[localFilesMap.size()]);
final long filesSizeTotal[] = new long[1];
for (int i = 0; i < files.length; i++) {
filesSizeTotal[0] += files[i].length();
}
// Monitor generation of MD5 hash, and provide feedback via the progress bar.
BytesProgressWatcher progressWatcher = new BytesProgressWatcher(filesSizeTotal[0]) {
public void updateBytesTransferred(long byteCount) {
super.updateBytesTransferred(byteCount);
String detailsText = formatBytesProgressWatcherDetails(this, true);
int progressValue = (int)((double)getBytesTransferred() * 100 / getBytesToTransfer());
updateProgressDialog(statusText, detailsText, progressValue);
}
};
FileComparerResults comparisonResults = FileComparer.getInstance()
.buildDiscrepancyLists(localFilesMap, s3ObjectsMap, progressWatcher);
stopProgressDialog();
return comparisonResults;
} finally {
stopProgressDialog();
}
}
    /**
     * Builds the list of download packages for the objects chosen for download,
     * comparing the candidates against existing local files and prompting the
     * user whether to overwrite any pre-existing file versions.
     *
     * @param comparisonResults discrepancy lists from {@link #compareRemoteAndLocalFiles}
     * @param s3DownloadObjectsMap map of object key to {@link S3Object} for download
     * @return the packages to download, or null if the user cancelled or there
     * is nothing to download
     * @throws Exception if package creation fails
     */
    private DownloadPackage[] buildDownloadPackageList(FileComparerResults comparisonResults,
        Map s3DownloadObjectsMap) throws Exception
    {
        // Determine which files to download, prompting user whether to over-write existing files
        List objectKeysForDownload = new ArrayList();
        objectKeysForDownload.addAll(comparisonResults.onlyOnServerKeys);
        int newFiles = comparisonResults.onlyOnServerKeys.size();
        int unchangedFiles = comparisonResults.alreadySynchronisedKeys.size();
        int changedFiles = comparisonResults.updatedOnClientKeys.size()
            + comparisonResults.updatedOnServerKeys.size();
        if (unchangedFiles > 0 || changedFiles > 0) {
            // Ask user whether to replace existing unchanged and/or existing changed files.
            log.debug("Files for download clash with existing local files, prompting user to choose which files to replace");
            List options = new ArrayList();
            String message = "Of the " + (newFiles + unchangedFiles + changedFiles)
                + " objects being downloaded:\n\n";
            if (newFiles > 0) {
                message += newFiles + " files are new.\n\n";
                options.add(DOWNLOAD_NEW_FILES_ONLY);
            }
            if (changedFiles > 0) {
                message += changedFiles + " files have changed.\n\n";
                options.add(DOWNLOAD_NEW_AND_CHANGED_FILES);
            }
            if (unchangedFiles > 0) {
                message += unchangedFiles + " files already exist and are unchanged.\n\n";
                options.add(DOWNLOAD_ALL_FILES);
            }
            message += "Please choose which files you wish to download:";
            Object response = JOptionPane.showInputDialog(
                ownerFrame, message, "Replace files?", JOptionPane.QUESTION_MESSAGE,
                null, options.toArray(), DOWNLOAD_NEW_AND_CHANGED_FILES);
            if (response == null) {
                // User dismissed the dialog: treat as cancel.
                return null;
            }
            if (DOWNLOAD_NEW_FILES_ONLY.equals(response)) {
                // No change required to default objectKeysForDownload list.
            } else if (DOWNLOAD_ALL_FILES.equals(response)) {
                objectKeysForDownload.addAll(comparisonResults.updatedOnClientKeys);
                objectKeysForDownload.addAll(comparisonResults.updatedOnServerKeys);
                objectKeysForDownload.addAll(comparisonResults.alreadySynchronisedKeys);
            } else if (DOWNLOAD_NEW_AND_CHANGED_FILES.equals(response)) {
                objectKeysForDownload.addAll(comparisonResults.updatedOnClientKeys);
                objectKeysForDownload.addAll(comparisonResults.updatedOnServerKeys);
            } else {
                // Download cancelled.
                return null;
            }
        }
        log.debug("Downloading " + objectKeysForDownload.size() + " objects");
        if (objectKeysForDownload.size() == 0) {
            return null;
        }
        // Create array of objects for download.
        S3Object[] objects = new S3Object[objectKeysForDownload.size()];
        int objectIndex = 0;
        for (Iterator iter = objectKeysForDownload.iterator(); iter.hasNext();) {
            objects[objectIndex++] = (S3Object) s3DownloadObjectsMap.get(iter.next());
        }
        Map downloadObjectsToFileMap = new HashMap();
        ArrayList downloadPackageList = new ArrayList();
        // Setup files to write to, creating parent directories when necessary.
        for (int i = 0; i < objects.length; i++) {
            File file = new File(downloadDirectory, objects[i].getKey());
            // Encryption password must be null if no password is set.
            String encryptionPassword = null;
            if (cockpitPreferences.isEncryptionPasswordSet()) {
                encryptionPassword = cockpitPreferences.getEncryptionPassword();
            }
            // Create local directories corresponding to objects flagged as dirs.
            if (Mimetypes.MIMETYPE_JETS3T_DIRECTORY.equals(objects[i].getContentType())) {
                file.mkdirs();
            }
            DownloadPackage downloadPackage = ObjectUtils
                .createPackageForDownload(objects[i], file, true, true, encryptionPassword);
            if (downloadPackage != null) {
                downloadObjectsToFileMap.put(objects[i].getKey(), file);
                downloadPackageList.add(downloadPackage);
            }
        }
        return (DownloadPackage[]) downloadPackageList
            .toArray(new DownloadPackage[downloadPackageList.size()]);
    }
    /**
     * This method is an {@link S3ServiceEventListener} action method that is invoked when this
     * application's <code>S3ServiceMulti</code> triggers a <code>DownloadObjectsEvent</code>.
     * <p>
     * This method merely updates the progress dialog as objects are downloaded,
     * showing byte-level progress when available and thread counts otherwise.
     *
     * @param event
     */
    public void s3ServiceEventPerformed(DownloadObjectsEvent event) {
        if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
            ThreadWatcher watcher = event.getThreadWatcher();
            // Show percentage of bytes transferred, if this info is available.
            if (watcher.isBytesTransferredInfoAvailable()) {
                startProgressDialog("Downloaded " +
                    watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                    byteFormatter.formatByteSize(watcher.getBytesTransferred())
                    + " of " + byteFormatter.formatByteSize(watcher.getBytesTotal()),
                    "", 0, 100, "Cancel Download",
                    watcher.getCancelEventListener());
            // ... otherwise just show the number of completed threads.
            } else {
                startProgressDialog("Downloaded " + watcher.getCompletedThreads()
                    + " of " + watcher.getThreadCount() + " objects",
                    "", 0, (int) watcher.getThreadCount(), "Cancel Download",
                    watcher.getCancelEventListener());
            }
        }
        else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
            ThreadWatcher watcher = event.getThreadWatcher();
            // Show percentage of bytes transferred, if this info is available.
            if (watcher.isBytesTransferredInfoAvailable()) {
                String bytesCompletedStr = byteFormatter.formatByteSize(watcher.getBytesTransferred());
                String bytesTotalStr = byteFormatter.formatByteSize(watcher.getBytesTotal());
                String statusText = "Downloaded " +
                    watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                    bytesCompletedStr + " of " + bytesTotalStr;
                String detailsText = formatTransferDetails(watcher);
                // Progress is reported as a whole-number percentage of bytes.
                int percentage = (int)
                    (((double)watcher.getBytesTransferred() / watcher.getBytesTotal()) * 100);
                updateProgressDialog(statusText, detailsText, percentage);
            }
            // ... otherwise just show the number of completed threads.
            else {
                ThreadWatcher progressStatus = event.getThreadWatcher();
                String statusText = "Downloaded " + progressStatus.getCompletedThreads()
                    + " of " + progressStatus.getThreadCount() + " objects";
                updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
            }
        } else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
            stopProgressDialog();
        }
        else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
            stopProgressDialog();
        }
        else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
            stopProgressDialog();
            String message = "Unable to download objects";
            log.error(message, event.getErrorCause());
            ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
        }
    }
/**
 * Builds the list of S3Objects to upload based on a client/server file comparison,
 * prompting the user to decide whether changed and/or unchanged files should be
 * re-uploaded.
 *
 * @param comparisonResults
 * classification of candidate files (new / changed / unchanged on the client)
 * @param uploadingFilesMap
 * map of file key (String) to local File for every candidate upload
 * @return
 * the prepared objects, or null if the user cancelled or nothing is to be uploaded
 * @throws Exception
 */
private S3Object[] buildUploadObjectsList(FileComparerResults comparisonResults,
    Map uploadingFilesMap) throws Exception
{
    // Determine which files to upload, prompting user whether to over-write existing files
    List fileKeysForUpload = new ArrayList();
    fileKeysForUpload.addAll(comparisonResults.onlyOnClientKeys);
    int newFiles = comparisonResults.onlyOnClientKeys.size();
    int unchangedFiles = comparisonResults.alreadySynchronisedKeys.size();
    int changedFiles = comparisonResults.updatedOnClientKeys.size()
        + comparisonResults.updatedOnServerKeys.size();
    if (unchangedFiles > 0 || changedFiles > 0) {
        // Ask user whether to replace existing unchanged and/or existing changed files.
        log.debug("Files for upload clash with existing S3 objects, prompting user to choose which files to replace");
        List options = new ArrayList();
        String message = "Of the " + uploadingFilesMap.size()
            + " files being uploaded:\n\n";
        if (newFiles > 0) {
            message += newFiles + " files are new.\n\n";
            options.add(UPLOAD_NEW_FILES_ONLY);
        }
        if (changedFiles > 0) {
            message += changedFiles + " files have changed.\n\n";
            options.add(UPLOAD_NEW_AND_CHANGED_FILES);
        }
        if (unchangedFiles > 0) {
            message += unchangedFiles + " files already exist and are unchanged.\n\n";
            options.add(UPLOAD_ALL_FILES);
        }
        message += "Please choose which files you wish to upload:";
        Object response = JOptionPane.showInputDialog(
            ownerFrame, message, "Replace files?", JOptionPane.QUESTION_MESSAGE,
            null, options.toArray(), UPLOAD_NEW_AND_CHANGED_FILES);
        // Dialog dismissed without a choice: abort the upload.
        if (response == null) {
            return null;
        }
        if (UPLOAD_NEW_FILES_ONLY.equals(response)) {
            // No change required to default fileKeysForUpload list.
        } else if (UPLOAD_ALL_FILES.equals(response)) {
            fileKeysForUpload.addAll(comparisonResults.updatedOnClientKeys);
            fileKeysForUpload.addAll(comparisonResults.updatedOnServerKeys);
            fileKeysForUpload.addAll(comparisonResults.alreadySynchronisedKeys);
        } else if (UPLOAD_NEW_AND_CHANGED_FILES.equals(response)) {
            fileKeysForUpload.addAll(comparisonResults.updatedOnClientKeys);
            fileKeysForUpload.addAll(comparisonResults.updatedOnServerKeys);
        } else {
            // Upload cancelled.
            stopProgressDialog();
            return null;
        }
    }
    if (fileKeysForUpload.size() == 0) {
        return null;
    }
    // Single-element array so the anonymous BytesProgressWatcher below can read
    // the latest status text (effectively-final workaround).
    final String[] statusText = new String[1];
    statusText[0] = "Prepared 0 of " + fileKeysForUpload.size() + " files for upload";
    startProgressDialog(statusText[0], "", 0, 100, null, null);
    // Estimate the total bytes to process for the progress bar.
    // NOTE(review): the factor 3 presumably accounts for the extra read/transform
    // passes when encryption or compression is enabled — TODO confirm.
    long bytesToProcess = 0;
    for (Iterator iter = fileKeysForUpload.iterator(); iter.hasNext();) {
        File file = (File) uploadingFilesMap.get(iter.next().toString());
        bytesToProcess += file.length() *
            (cockpitPreferences.isUploadEncryptionActive() || cockpitPreferences.isUploadCompressionActive() ? 3 : 1);
    }
    // Watcher updates the progress dialog as object preparation consumes bytes.
    BytesProgressWatcher progressWatcher = new BytesProgressWatcher(bytesToProcess) {
        public void updateBytesTransferred(long byteCount) {
            super.updateBytesTransferred(byteCount);
            String detailsText = formatBytesProgressWatcherDetails(this, false);
            int progressValue = (int)((double)getBytesTransferred() * 100 / getBytesToTransfer());
            updateProgressDialog(statusText[0], detailsText, progressValue);
        }
    };
    // Populate S3Objects representing upload files with metadata etc.
    final S3Object[] objects = new S3Object[fileKeysForUpload.size()];
    int objectIndex = 0;
    for (Iterator iter = fileKeysForUpload.iterator(); iter.hasNext();) {
        String fileKey = iter.next().toString();
        File file = (File) uploadingFilesMap.get(fileKey);
        S3Object newObject = ObjectUtils
            .createObjectForUpload(fileKey, file,
                (cockpitPreferences.isUploadEncryptionActive() ? encryptionUtil : null),
                cockpitPreferences.isUploadCompressionActive(), progressWatcher);
        // Apply the configured canned ACL to each new object.
        String aclPreferenceString = cockpitPreferences.getUploadACLPermission();
        if (CockpitPreferences.UPLOAD_ACL_PERMISSION_PRIVATE.equals(aclPreferenceString)) {
            // Objects are private by default, nothing more to do.
        } else if (CockpitPreferences.UPLOAD_ACL_PERMISSION_PUBLIC_READ.equals(aclPreferenceString)) {
            newObject.setAcl(AccessControlList.REST_CANNED_PUBLIC_READ);
        } else if (CockpitPreferences.UPLOAD_ACL_PERMISSION_PUBLIC_READ_WRITE.equals(aclPreferenceString)) {
            newObject.setAcl(AccessControlList.REST_CANNED_PUBLIC_READ_WRITE);
        } else {
            log.warn("Ignoring unrecognised upload ACL permission setting: " + aclPreferenceString);
        }
        statusText[0] = "Prepared " + (objectIndex + 1)
            + " of " + fileKeysForUpload.size() + " files for upload";
        objects[objectIndex++] = newObject;
    }
    stopProgressDialog();
    return objects;
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>CreateObjectsEvent</code>.
* <p>
* This method merely updates the progress dialog as files are uploaded.
*
* @param event
*/
public void s3ServiceEventPerformed(final CreateObjectsEvent event) {
    // STARTED: open the progress dialog, sized for byte-level or thread-level progress.
    if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        ThreadWatcher watcher = event.getThreadWatcher();
        // Show percentage of bytes transferred, if this info is available.
        if (watcher.isBytesTransferredInfoAvailable()) {
            String bytesTotalStr = byteFormatter.formatByteSize(watcher.getBytesTotal());
            String statusText = "Uploaded " +
                watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                "0 of " + bytesTotalStr;
            startProgressDialog(statusText, " ", 0, 100, "Cancel Upload",
                event.getThreadWatcher().getCancelEventListener());
        }
        // ... otherwise show the number of completed threads.
        else {
            startProgressDialog(
                "Uploaded 0 of " + watcher.getThreadCount() + " objects",
                "", (int) watcher.getCompletedThreads(), (int) watcher.getThreadCount(),
                "Cancel upload", event.getThreadWatcher().getCancelEventListener());
        }
    }
    else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
        // Add the just-created objects to the objects table on the Swing EDT.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                for (int i = 0; i < event.getCreatedObjects().length; i++) {
                    S3Object object = event.getCreatedObjects()[i];
                    object.setBucketName(getCurrentSelectedBucket().getName());
                    objectTableModel.addObject(object);
                }
                if (event.getCreatedObjects().length > 0) {
                    updateObjectsSummary(true);
                }
            }
        });
        ThreadWatcher watcher = event.getThreadWatcher();
        // Show percentage of bytes transferred, if this info is available.
        if (watcher.isBytesTransferredInfoAvailable()) {
            if (watcher.getBytesTransferred() >= watcher.getBytesTotal()) {
                // Upload is completed, just waiting on response from S3.
                String statusText = "Upload completed, awaiting confirmation";
                updateProgressDialog(statusText, "", 100);
            } else {
                String bytesCompletedStr = byteFormatter.formatByteSize(watcher.getBytesTransferred());
                String bytesTotalStr = byteFormatter.formatByteSize(watcher.getBytesTotal());
                String statusText = "Uploaded " +
                    watcher.getCompletedThreads() + "/" + watcher.getThreadCount() + " - " +
                    bytesCompletedStr + " of " + bytesTotalStr;
                int percentage = (int)
                    (((double)watcher.getBytesTransferred() / watcher.getBytesTotal()) * 100);
                String detailsText = formatTransferDetails(watcher);
                updateProgressDialog(statusText, detailsText, percentage);
            }
        }
        // ... otherwise show the number of completed threads.
        else {
            ThreadWatcher progressStatus = event.getThreadWatcher();
            String statusText = "Uploaded " + progressStatus.getCompletedThreads() +
                " of " + progressStatus.getThreadCount() + " objects";
            updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
        }
    }
    else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
        stopProgressDialog();
        // Refresh the summary and re-cache the bucket's full object listing.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                updateObjectsSummary(false);
                S3Object[] allObjects = objectTableModel.getObjects();
                cachedBuckets.put(getCurrentSelectedBucket().getName(), allObjects);
            }
        });
    }
    else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                updateObjectsSummary(false);
            }
        });
        stopProgressDialog();
    }
    // ERROR: close the dialog, log, and surface the cause to the user.
    else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
        stopProgressDialog();
        String message = "Unable to upload objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
 * Prompts the user with a copy/move dialog for the currently selected objects and,
 * if approved, performs the copy (and optional delete-after-copy) in a background
 * thread, refreshing the object listing when the current bucket's contents change.
 */
private void copyObjects() {
    try {
        final S3Object[] sourceObjects = getSelectedObjects();
        CopyObjectsDialog dialog = new CopyObjectsDialog(ownerFrame,
            "Copy or Move Objects", skinsFactory,
            sourceObjects, bucketTableModel.getBuckets());
        dialog.setVisible(true);
        if (dialog.isCopyActionApproved()) {
            // Capture all dialog results before disposing it.
            final String currentBucketName = getCurrentSelectedBucket().getName();
            final String destinationBucketName = dialog.getDestinationBucketName();
            final String[] sourceObjectKeys = dialog.getSourceObjectKeys();
            final S3Object[] destinationObjects = dialog.getDestinationObjects();
            final boolean isDeleteAfterCopy = dialog.isMoveOptionSelected();
            final boolean retainAcls = dialog.isCopyOriginalAccessControlLists();
            dialog.dispose();
            // Invalidate the cached listing for the destination bucket, if different.
            if (!destinationBucketName.equals(currentBucketName)) {
                cachedBuckets.remove(destinationBucketName);
            }
            runInBackgroundThread(new Runnable() {
                public void run() {
                    if (retainAcls) {
                        // Retain ACL settings from original objects.
                        if (!s3ServiceMulti.getObjectACLs(
                            getCurrentSelectedBucket(), sourceObjects))
                        {
                            return;
                        }
                        for (int i = 0; i < sourceObjects.length; i++) {
                            destinationObjects[i].setAcl(
                                sourceObjects[i].getAcl());
                        }
                    }
                    // Copy objects. Metadata is retained, not replaced.
                    s3ServiceMulti.copyObjects(currentBucketName, destinationBucketName,
                        sourceObjectKeys, destinationObjects, false);
                    if (isDeleteAfterCopy) {
                        // NOTE(review): this local deliberately shadows the outer
                        // sourceObjects — key-only stubs are enough for deletion.
                        final S3Object[] sourceObjects = new S3Object[sourceObjectKeys.length];
                        for (int i = 0; i < sourceObjectKeys.length; i++) {
                            sourceObjects[i] = new S3Object(sourceObjectKeys[i]);
                        }
                        s3ServiceMulti.deleteObjects(getCurrentSelectedBucket(), sourceObjects);
                    }
                    if (destinationBucketName.equals(currentBucketName) || isDeleteAfterCopy) {
                        // Refresh object listing for current bucket if the bucket's contents
                        // have changed.
                        listObjects();
                    }
                }
            });
        } else {
            dialog.dispose();
        }
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        stopProgressDialog();
        String message = "Unable to modify objects";
        log.error(message, e);
        ErrorDialog.showDialog(ownerFrame, this, message, e);
    }
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>CopyObjectsEvent</code>.
* <p>
* This method merely updates the progress dialog as objects are copied.
*
* @param event
*/
/**
 * Updates the progress dialog in response to the lifecycle of an object copy
 * operation: open on start, advance while copying, close on completion or
 * cancellation, and report errors to the user.
 */
public void s3ServiceEventPerformed(final CopyObjectsEvent event) {
    final int eventCode = event.getEventCode();
    if (ServiceEvent.EVENT_STARTED == eventCode) {
        ThreadWatcher threadWatcher = event.getThreadWatcher();
        startProgressDialog("Copied 0 of " + threadWatcher.getThreadCount() + " objects",
            "", 0, (int) threadWatcher.getThreadCount(),
            "Cancel Copy", threadWatcher.getCancelEventListener());
    } else if (ServiceEvent.EVENT_IN_PROGRESS == eventCode) {
        ThreadWatcher threadWatcher = event.getThreadWatcher();
        long completed = threadWatcher.getCompletedThreads();
        updateProgressDialog(
            "Copied " + completed + " of " + threadWatcher.getThreadCount() + " objects",
            "", (int) completed);
    } else if (ServiceEvent.EVENT_COMPLETED == eventCode
               || ServiceEvent.EVENT_CANCELLED == eventCode) {
        stopProgressDialog();
    } else if (ServiceEvent.EVENT_ERROR == eventCode) {
        stopProgressDialog();
        String message = "Unable to copy objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
 * Opens the signed-GET-URL dialog for the currently selected objects.
 * Does nothing (beyond a warning) when no objects are selected.
 */
private void generatePublicGetUrls() {
    final S3Object[] selectedObjects = getSelectedObjects();
    if (selectedObjects.length < 1) {
        log.warn("Ignoring Generate Public URLs object command because no objects are selected");
        return;
    }
    SignedGetUrlDialog urlDialog = new SignedGetUrlDialog(ownerFrame, this,
        s3ServiceMulti.getS3Service(), selectedObjects);
    urlDialog.setVisible(true);
}
/**
 * Builds the torrent URL for the single selected object and shows it to the
 * user in an editable dialog. Requires exactly one object to be selected.
 */
private void generateTorrentUrl() {
    final S3Object[] selectedObjects = getSelectedObjects();
    if (selectedObjects.length != 1) {
        log.warn("Ignoring Generate Public URL object command, can only operate on a single object");
        return;
    }
    S3Object selectedObject = selectedObjects[0];
    // Build the torrent URL for the selected object in the current bucket.
    String torrentUrl = S3Service.createTorrentUrl(
        getCurrentSelectedBucket().getName(), selectedObject.getKey());
    // Present the URL in an input dialog so the user can copy it.
    JOptionPane.showInputDialog(ownerFrame,
        "Torrent URL for '" + selectedObject.getKey() + "'.",
        "Torrent URL", JOptionPane.INFORMATION_MESSAGE, null, null, torrentUrl);
}
/**
 * Deletes the currently selected objects after asking the user for confirmation,
 * then refreshes the objects summary and the cached listing for the bucket.
 * The deletion runs in a background thread.
 */
private void deleteSelectedObjects() {
    final S3Object[] objects = getSelectedObjects();
    if (objects.length == 0) {
        log.warn("Ignoring delete objects command, no currently selected objects");
        return;
    }
    int response = JOptionPane.showConfirmDialog(ownerFrame,
        (objects.length == 1 ?
            "Are you sure you want to delete '" + objects[0].getKey() + "'?" :
            "Are you sure you want to delete " + objects.length + " objects"
        ),
        "Delete Objects?", JOptionPane.YES_NO_OPTION);
    // Only proceed on an explicit "Yes". The previous check rejected NO_OPTION
    // only, so dismissing the dialog via its close button (CLOSED_OPTION, -1)
    // fell through and deleted the objects anyway.
    if (response != JOptionPane.YES_OPTION) {
        return;
    }
    runInBackgroundThread(new Runnable() {
        public void run() {
            s3ServiceMulti.deleteObjects(getCurrentSelectedBucket(), objects);
            // Back on the dispatcher thread: refresh the summary and cache.
            runInDispatcherThreadImmediately(new Runnable() {
                public void run() {
                    updateObjectsSummary(false);
                    S3Object[] allObjects = objectTableModel.getObjects();
                    cachedBuckets.put(getCurrentSelectedBucket().getName(), allObjects);
                }
            });
        }
    });
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>DeleteObjectsEvent</code>.
* <p>
* This method merely updates the progress dialog as objects are deleted.
*
* @param event
*/
public void s3ServiceEventPerformed(final DeleteObjectsEvent event) {
    // STARTED: open the progress dialog sized to the number of delete threads.
    if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        startProgressDialog(
            "Deleted 0 of " + event.getThreadWatcher().getThreadCount() + " objects",
            "", 0, (int) event.getThreadWatcher().getThreadCount(), "Cancel Delete Objects",
            event.getThreadWatcher().getCancelEventListener());
    }
    else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
        // Remove deleted objects from the table on the Swing EDT.
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                for (int i = 0; i < event.getDeletedObjects().length; i++) {
                    objectTableModel.removeObject(
                        event.getDeletedObjects()[i]);
                }
                if (event.getDeletedObjects().length > 0) {
                    updateObjectsSummary(true);
                }
            }
        });
        ThreadWatcher progressStatus = event.getThreadWatcher();
        String statusText = "Deleted " + progressStatus.getCompletedThreads()
            + " of " + progressStatus.getThreadCount() + " objects";
        updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
    }
    else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
        // Do NOT trigger a listObjects() refresh here: kicking off a re-listing
        // while the cancelled delete threads are still winding down caused GUI
        // race condition exceptions. The IN_PROGRESS handler has already removed
        // the objects that were actually deleted before cancellation.
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
        listObjects(); // Refresh object listing.
        stopProgressDialog();
        String message = "Unable to delete objects";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
* Retrieves details about objects including metadata etc by invoking the method
* {@link S3ServiceMulti#getObjectsHeads}.
*
* This is generally done as a prelude to some further action, such as
* displaying the objects' details or downloading the objects.
* The real action occurs in the method <code>s3ServiceEventPerformed</code> for handling
* <code>GetObjectHeadsEvent</code> events.
*
* @param candidateObjects
*
* @return
* true if objects details were successfully retrieved.
*/
/**
 * Issues HEAD requests for the subset of the given objects whose metadata has
 * not yet been retrieved, as a prelude to displaying details or downloading.
 *
 * @param candidateObjects objects that may need their metadata completed
 * @return true if the HEAD requests completed successfully
 */
private boolean retrieveObjectsDetails(final S3Object[] candidateObjects) {
    // Identify which of the candidate objects have incomplete metadata.
    ArrayList incompleteList = new ArrayList();
    for (int index = 0; index < candidateObjects.length; index++) {
        S3Object candidate = candidateObjects[index];
        if (!candidate.isMetadataComplete()) {
            incompleteList.add(candidate);
        }
    }
    log.debug("Of " + candidateObjects.length + " object candidates for HEAD requests "
        + incompleteList.size() + " are incomplete, performing requests for these only");
    // HEAD requests are performed only for the incomplete subset.
    final S3Object[] incompleteObjects =
        (S3Object[]) incompleteList.toArray(new S3Object[incompleteList.size()]);
    return s3ServiceMulti.getObjectsHeads(getCurrentSelectedBucket(), incompleteObjects);
}
/**
* This method is an {@link S3ServiceEventListener} action method that is invoked when this
* application's <code>S3ServiceMulti</code> triggers a <code>GetObjectHeadsEvent</code>.
* <p>
* This method merely updates the progress dialog as object details (heads) are retrieved.
*
* @param event
*/
public void s3ServiceEventPerformed(final GetObjectHeadsEvent event) {
    // STARTED: open the progress dialog only when there is actually work to do.
    if (ServiceEvent.EVENT_STARTED == event.getEventCode()) {
        if (event.getThreadWatcher().getThreadCount() > 0) {
            startProgressDialog("Retrieved details for 0 of "
                + event.getThreadWatcher().getThreadCount() + " objects",
                "", 0, (int) event.getThreadWatcher().getThreadCount(), "Cancel Retrieval",
                event.getThreadWatcher().getCancelEventListener());
        }
    }
    else if (ServiceEvent.EVENT_IN_PROGRESS == event.getEventCode()) {
        final ThreadWatcher progressStatus = event.getThreadWatcher();
        // Store detail-complete objects in table.
        runInDispatcherThreadImmediately(new Runnable() {
            public void run() {
                // Update object in table with the retrieved details.
                for (int i = 0; i < event.getCompletedObjects().length; i++) {
                    S3Object objectWithDetails = event.getCompletedObjects()[i];
                    S3Object originalObject = objectTableModel.getObjectByKey(
                        objectWithDetails.getKey());
                    originalObject.replaceAllMetadata(objectWithDetails.getMetadataMap());
                    originalObject.setMetadataComplete(true);
                    log.debug("Updated table with " + originalObject.getKey()
                        + ", content-type=" + originalObject.getContentType());
                }
            }
        });
        // Update progress of GetObject requests.
        String statusText = "Retrieved details for " + progressStatus.getCompletedThreads()
            + " of " + progressStatus.getThreadCount() + " objects";
        updateProgressDialog(statusText, "", (int) progressStatus.getCompletedThreads());
    }
    else if (ServiceEvent.EVENT_COMPLETED == event.getEventCode()) {
        // Stop GetObjectHead progress display.
        stopProgressDialog();
    }
    else if (ServiceEvent.EVENT_CANCELLED == event.getEventCode()) {
        stopProgressDialog();
    }
    // ERROR: close the dialog, log, and surface the cause to the user.
    else if (ServiceEvent.EVENT_ERROR == event.getEventCode()) {
        stopProgressDialog();
        String message = "Unable to retrieve objects details";
        log.error(message, event.getErrorCause());
        ErrorDialog.showDialog(ownerFrame, this, message, event.getErrorCause());
    }
}
/**
 * Formats a transfer-rate details string ("<rate>/s"), with an estimated time
 * remaining appended when the watcher can provide one.
 */
private String formatTransferDetails(ThreadWatcher watcher) {
    String details = byteFormatter.formatByteSize(watcher.getBytesPerSecond()) + "/s";
    if (watcher.isTimeRemainingAvailable()) {
        details += " - Time remaining: " + timeFormatter.formatTime(watcher.getTimeRemaining());
    }
    return details;
}
/**
 * Formats a progress details string, optionally prefixed with the transferred
 * and total byte counts, always ending with the estimated time remaining.
 */
private String formatBytesProgressWatcherDetails(BytesProgressWatcher watcher, boolean includeBytes) {
    long secondsRemaining = watcher.getRemainingTime();
    String details = "";
    if (includeBytes) {
        details = byteFormatter.formatByteSize(watcher.getBytesTransferred())
            + " of " + byteFormatter.formatByteSize(watcher.getBytesToTransfer())
            + " - ";
    }
    details += "Time remaining: " + timeFormatter.formatTime(secondsRemaining);
    return details;
}
/**
* Follows hyperlinks clicked on by a user. This is achieved differently depending on whether
* Cockpit is running as an applet or as a stand-alone application:
* <ul>
* <li>Application: Detects the default browser application for the user's system (using
* <tt>BareBonesBrowserLaunch</tt>) and opens the link as a new window in that browser</li>
* <li>Applet: Opens the link in the current browser using the applet's context</li>
* </ul>
*
* @param url
* the url to open
* @param target
* the target pane to open the url in, eg "_blank". This may be null.
*/
/**
 * Opens the given URL: in the system's default browser when running as a
 * stand-alone application, otherwise via the applet context (optionally into
 * the named target pane).
 *
 * @param url the url to open
 * @param target the target pane to open the url in, eg "_blank"; may be null
 */
public void followHyperlink(URL url, String target) {
    if (isStandAloneApplication) {
        // Stand-alone: hand the link to the system's default browser.
        BareBonesBrowserLaunch.openURL(url.toString());
    } else if (target == null) {
        getAppletContext().showDocument(url);
    } else {
        getAppletContext().showDocument(url, target);
    }
}
/**
* Implementation method for the CredentialsProvider interface.
* <p>
* Based on sample code:
* <a href="http://svn.apache.org/viewvc/jakarta/commons/proper/httpclient/trunk/src/examples/InteractiveAuthenticationExample.java?view=markup">InteractiveAuthenticationExample</a>
*
*/
public Credentials getCredentials(AuthScheme authscheme, String host, int port, boolean proxy) throws CredentialsNotAvailableException {
    if (authscheme == null) {
        return null;
    }
    try {
        Credentials credentials = null;
        // NTLM (Windows) authentication: prompt for user, password and domain.
        if (authscheme instanceof NTLMScheme) {
            AuthenticationDialog pwDialog = new AuthenticationDialog(
                ownerFrame, "Authentication Required",
                "<html>Host <b>" + host + ":" + port + "</b> requires Windows authentication</html>", true);
            pwDialog.setVisible(true);
            // An empty user name is treated as a cancelled dialog: null is returned.
            if (pwDialog.getUser().length() > 0) {
                credentials = new NTCredentials(pwDialog.getUser(), pwDialog.getPassword(),
                    host, pwDialog.getDomain());
            }
            pwDialog.dispose();
        } else
        // Basic/Digest (RFC 2617) authentication: prompt for user and password only.
        if (authscheme instanceof RFC2617Scheme) {
            AuthenticationDialog pwDialog = new AuthenticationDialog(
                ownerFrame, "Authentication Required",
                "<html><center>Host <b>" + host + ":" + port + "</b>"
                + " requires authentication for the realm:<br><b>" + authscheme.getRealm() + "</b></center></html>", false);
            pwDialog.setVisible(true);
            if (pwDialog.getUser().length() > 0) {
                credentials = new UsernamePasswordCredentials(pwDialog.getUser(), pwDialog.getPassword());
            }
            pwDialog.dispose();
        } else {
            // Any other scheme is unsupported.
            throw new CredentialsNotAvailableException("Unsupported authentication scheme: " +
                authscheme.getSchemeName());
        }
        return credentials;
    } catch (IOException e) {
        throw new CredentialsNotAvailableException(e.getMessage(), e);
    }
}
/**
 * Reports whether object filtering is in effect: the filter checkbox must be
 * selected AND either a prefix or a delimiter must have been entered.
 *
 * @return true when a non-empty prefix or delimiter filter is active
 */
private boolean isObjectFilteringActive() {
    if (!filterObjectsCheckBox.isSelected()) {
        return false;
    }
    // Collapsed the former nested if/else-return-boolean into a single expression.
    String delimiter = (String) filterObjectsDelimiter.getSelectedItem();
    return filterObjectsPrefix.getText().length() > 0 || delimiter.length() > 0;
}
/**
 * Mouse listener that shows the bucket or object context menu on a
 * platform-appropriate popup trigger (checked on both press and release),
 * first selecting the list item or table row under the pointer.
 */
private class ContextMenuListener extends MouseAdapter {
    public void mousePressed(MouseEvent e) {
        showContextMenu(e);
    }
    public void mouseReleased(MouseEvent e) {
        showContextMenu(e);
    }
    private void showContextMenu(MouseEvent event) {
        if (!event.isPopupTrigger()) {
            return;
        }
        Object source = event.getSource();
        // Make sure the item under the pointer is selected before the menu appears.
        if (source instanceof JList) {
            JList list = (JList) source;
            int itemIndex = list.locationToIndex(event.getPoint());
            if (itemIndex >= 0) {
                list.setSelectedIndex(itemIndex);
            }
        } else if (source instanceof JTable) {
            JTable table = (JTable) source;
            int row = table.rowAtPoint(event.getPoint());
            if (row >= 0) {
                table.addRowSelectionInterval(row, row);
            }
        }
        // Show the popup menu matching the component that was clicked.
        if (source.equals(bucketsTable)) {
            showBucketPopupMenu((JComponent) source, event.getX(), event.getY());
        } else if (source.equals(objectsTable)) {
            showObjectPopupMenu((JComponent) source, event.getX(), event.getY());
        }
    }
}
/**
* Runs Cockpit as a stand-alone application.
* @param args
* @throws Exception
*/
/**
 * Runs Cockpit as a stand-alone application.
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    JFrame ownerFrame = new JFrame("JetS3t Cockpit");
    // WindowAdapter (fully qualified; no new import required) replaces the
    // verbose WindowListener implementation with six empty stub methods.
    ownerFrame.addWindowListener(new java.awt.event.WindowAdapter() {
        public void windowClosing(WindowEvent e) {
            // Dispose the frame so the application can exit.
            e.getWindow().dispose();
        }
    });
    new Cockpit(ownerFrame);
}
}
| Removed an object re-listing action when an object deletion is cancelled, as it was causing nasty GUI race condition exceptions
| src/org/jets3t/apps/cockpit/Cockpit.java | Removed an object re-listing action when an object deletion is cancelled, as it was causing nasty GUI race condition exceptions |
|
Java | apache-2.0 | 59509d22b7f14b69000c2ed2bccd58166155a40a | 0 | 3dcitydb/importer-exporter,3dcitydb/importer-exporter,3dcitydb/importer-exporter | /*
* 3D City Database - The Open Source CityGML Database
* http://www.3dcitydb.org/
*
* (C) 2013 - 2016,
* Chair of Geoinformatics,
* Technische Universitaet Muenchen, Germany
* http://www.gis.bgu.tum.de/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* virtualcitySYSTEMS GmbH, Berlin <http://www.virtualcitysystems.de/>
* M.O.S.S. Computer Grafik Systeme GmbH, Muenchen <http://www.moss.de/>
*
* The 3D City Database Importer/Exporter program is free software:
* you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free
* Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
package org.citydb.modules.kml.database;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.imageio.ImageIO;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import net.opengis.kml._2.DocumentType;
import net.opengis.kml._2.KmlType;
import net.opengis.kml._2.LatLonAltBoxType;
import net.opengis.kml._2.LinkType;
import net.opengis.kml._2.LodType;
import net.opengis.kml._2.NetworkLinkType;
import net.opengis.kml._2.ObjectFactory;
import net.opengis.kml._2.PlacemarkType;
import net.opengis.kml._2.RegionType;
import net.opengis.kml._2.ViewRefreshModeEnumType;
import org.citydb.api.concurrent.WorkerPool;
import org.citydb.api.event.EventDispatcher;
import org.citydb.api.log.LogLevel;
import org.citydb.config.Config;
import org.citydb.config.project.kmlExporter.DisplayForm;
import org.citydb.database.adapter.BlobExportAdapter;
import org.citydb.log.Logger;
import org.citydb.modules.common.balloon.BalloonTemplateHandlerImpl;
import org.citydb.modules.common.event.CounterEvent;
import org.citydb.modules.common.event.CounterType;
import org.citydb.modules.kml.util.CityObject4JSON;
import org.citydb.modules.kml.util.ExportTracker;
import org.citygml4j.model.citygml.CityGMLClass;
import org.citygml4j.util.xml.SAXEventBuffer;
public class KmlExporterManager {
// JAXB contexts for marshalling KML and COLLADA documents.
private final JAXBContext jaxbKmlContext;
private final JAXBContext jaxbColladaContext;
// Pool of workers that write marshalled SAX events to the output file.
private final WorkerPool<SAXEventBuffer> ioWriterPool;
// Tracks exported city objects (id -> JSON summary) across the export run.
private final ExportTracker tracker;
private final ObjectFactory kmlFactory;
// Adapter for reading texture images (BLOBs) out of the database.
private final BlobExportAdapter textureExportAdapter;
private final EventDispatcher eventDispatcher;
private final Config config;
// True when tiled bounding-box filtering is enabled in the export config.
private boolean isBBoxActive;
// Base name of the master KML file, derived from the configured export path.
private String mainFilename;
// Per-feature-type counters of exported top-level features.
private HashMap<CityGMLClass, Long> featureCounterMap;
private final String ENCODING = "UTF-8";
private final Charset CHARSET = Charset.forName(ENCODING);
// Scratch folder used when balloons are exported temporarily before zipping.
private final String TEMP_FOLDER = "__temp";
public KmlExporterManager(JAXBContext jaxbKmlContext,
JAXBContext jaxbColladaContext,
WorkerPool<SAXEventBuffer> ioWriterPool,
ExportTracker tracker,
ObjectFactory kmlFactory,
BlobExportAdapter textureExportAdapter,
EventDispatcher eventDispatcher,
Config config) {
this.jaxbKmlContext = jaxbKmlContext;
this.jaxbColladaContext = jaxbColladaContext;
this.ioWriterPool = ioWriterPool;
this.tracker = tracker;
this.kmlFactory = kmlFactory;
this.textureExportAdapter = textureExportAdapter;
this.eventDispatcher = eventDispatcher;
this.config = config;
isBBoxActive = config.getProject().getKmlExporter().getFilter().getComplexFilter().getTiledBoundingBox().getActive().booleanValue();
mainFilename = config.getInternal().getExportFileName().trim();
if (mainFilename.lastIndexOf(File.separator) != -1) {
if (mainFilename.lastIndexOf(".") == -1) {
mainFilename = mainFilename.substring(mainFilename.lastIndexOf(File.separator) + 1);
}
else {
mainFilename = mainFilename.substring(mainFilename.lastIndexOf(File.separator) + 1, mainFilename.lastIndexOf("."));
}
}
else {
if (mainFilename.lastIndexOf(".") != -1) {
mainFilename = mainFilename.substring(0, mainFilename.lastIndexOf("."));
}
}
mainFilename = mainFilename + ".kml";
featureCounterMap = new HashMap<CityGMLClass, Long>();
}
/**
 * Records a successfully exported top-level feature: increments the per-type
 * counter, registers the feature's JSON summary with the export tracker, and
 * fires a counter event for progress reporting.
 */
public void updateFeatureTracker(KmlSplittingResult work) {
    Long counter = featureCounterMap.get(work.getCityObjectType());
    // Long.valueOf replaces the deprecated "new Long(1)" boxing constructor.
    long updatedCount = (counter == null) ? 1L : counter.longValue() + 1;
    featureCounterMap.put(work.getCityObjectType(), Long.valueOf(updatedCount));
    tracker.put(work.getId(), work.getJson());
    eventDispatcher.triggerEvent(new CounterEvent(CounterType.TOPLEVEL_FEATURE, 1, this));
}
/**
 * Returns the live map of exported-feature counts, keyed by CityGML class.
 */
public HashMap<CityGMLClass, Long> getFeatureCounter() {
    return this.featureCounterMap;
}
/**
 * Returns the tracker that records exported city objects for this run.
 */
public ExportTracker getExportTracker() {
    return tracker;
}
/**
 * Writes a list of placemarks that all belong to the same city object (same gml:id)
 * to the KML output. Depending on the configuration the placemarks are either
 * streamed into the main document via the I/O writer pool, or written into a
 * separate per-object file (optionally packaged as KMZ) that is referenced from
 * the main document through a region-bound network link.
 *
 * @param placemarkList placemarks of one city object; entries may be null and are skipped
 * @param work splitter result describing the city object being exported
 * @param balloonInSeparateFile if true, the balloon HTML is extracted into its own file
 * @throws JAXBException if marshalling of KML elements fails
 */
public void print(List<PlacemarkType> placemarkList,
        KmlSplittingResult work,
        boolean balloonInSeparateFile) throws JAXBException {
    SAXEventBuffer buffer = new SAXEventBuffer();
    Marshaller kmlMarshaller = jaxbKmlContext.createMarshaller();
    // one-file-per-object output is a stand-alone document and gets pretty-printed;
    // otherwise only an XML fragment is produced (no XML declaration)
    if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
        kmlMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    }
    else {
        kmlMarshaller.setProperty(Marshaller.JAXB_FRAGMENT, true);
    }
    // all placemarks in this list belong together (same gmlid),
    // so the balloon must be extracted only once.
    boolean balloonExtracted = false;
    String gmlId = null;
    String placemarkDescription = null;
    KmlType kmlType = null;
    DocumentType document = null;
    // NOTE(review): zipOut/fileWriter are not closed on exception paths below
    // (no finally); a failure mid-write leaks the stream — consider try-with-resources.
    ZipOutputStream zipOut = null;
    OutputStreamWriter fileWriter = null;
    String path = tracker.getCurrentWorkingDirectoryPath();
    File directory = new File(path);
    try {
        for (PlacemarkType placemark: placemarkList) {
            if (placemark != null) {
                String displayFormName = work.getDisplayForm().getName();
                if (placemark.getDescription() != null && balloonInSeparateFile) {
                    // replace the inline balloon content by an iframe pointing to the
                    // extracted HTML file in the balloon subfolder
                    StringBuilder parentFrame = new StringBuilder(BalloonTemplateHandlerImpl.parentFrameStart);
                    parentFrame.append('.'); // same folder
                    parentFrame.append('/').append(BalloonTemplateHandlerImpl.balloonDirectoryName);
                    parentFrame.append('/').append(work.getGmlId()).append('-').append(work.getId());
                    parentFrame.append(BalloonTemplateHandlerImpl.parentFrameEnd);
                    if (!balloonExtracted) {
                        placemarkDescription = placemark.getDescription();
                        // --------------- create subfolder ---------------
                        if (config.getProject().getKmlExporter().isExportAsKmz()) {
                            if (!isBBoxActive || !config.getProject().getKmlExporter().isOneFilePerObject()) {
                                // export temporarily as kml, it will be later added to kmz if needed
                                directory = new File(path, TEMP_FOLDER);
                                if (!directory.exists()) {
                                    Logger.getInstance().info("Creating temporary folder...");
                                    directory.mkdir();
                                }
                            }
                        }
                        else { // export as kml
                            if (config.getProject().getKmlExporter().isOneFilePerObject()) {
                                directory = new File(path, String.valueOf(work.getId()));
                                if (!directory.exists()) {
                                    directory.mkdir();
                                }
                            }
                        }
                        // write the balloon HTML to disk unless it will be embedded
                        // into the per-object KMZ archive later on
                        if (!isBBoxActive || !config.getProject().getKmlExporter().isOneFilePerObject() || !config.getProject().getKmlExporter().isExportAsKmz()) {
                            try {
                                File balloonsDirectory = new File(directory, BalloonTemplateHandlerImpl.balloonDirectoryName);
                                if (!balloonsDirectory.exists()) {
                                    balloonsDirectory.mkdir();
                                }
                                File htmlFile = new File(balloonsDirectory, work.getGmlId() + '-' + work.getId() + ".html");
                                FileOutputStream outputStream = new FileOutputStream(htmlFile);
                                outputStream.write(placemarkDescription.getBytes(CHARSET));
                                outputStream.close();
                            }
                            catch (IOException ioe) {
                                ioe.printStackTrace();
                            }
                        }
                        balloonExtracted = true;
                    }
                    placemark.setDescription(parentFrame.toString());
                }
                if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
                    // lazily set up the per-object file and its network link on the
                    // first non-null placemark (all share the same gml:id)
                    if (gmlId == null) {
                        gmlId = work.getGmlId();
                        boolean isHighlighting = false;
                        String filename = gmlId + "_" + displayFormName;
                        if (placemark.getId().startsWith(config.getProject().getKmlExporter().getIdPrefixes().getPlacemarkHighlight())) {
                            filename = filename + "_" + DisplayForm.HIGHLIGTHTED_STR;
                            isHighlighting = true;
                        }
                        File placemarkDirectory = new File(path + File.separator + work.getId());
                        if (!placemarkDirectory.exists()) {
                            placemarkDirectory.mkdir();
                        }
                        // create kml root element
                        kmlType = kmlFactory.createKmlType();
                        document = kmlFactory.createDocumentType();
                        document.setOpen(true);
                        document.setName(filename);
                        kmlType.setAbstractFeatureGroup(kmlFactory.createDocument(document));
                        String fileExtension = ".kml";
                        if (config.getProject().getKmlExporter().isExportAsKmz()) {
                            fileExtension = ".kmz";
                            File placemarkFile = new File(placemarkDirectory, filename + ".kmz");
                            zipOut = new ZipOutputStream(new FileOutputStream(placemarkFile));
                            ZipEntry zipEntry = new ZipEntry("doc.kml");
                            zipOut.putNextEntry(zipEntry);
                            fileWriter = new OutputStreamWriter(zipOut, CHARSET);
                        }
                        else {
                            File placemarkFile = new File(placemarkDirectory, filename + ".kml");
                            fileWriter = new OutputStreamWriter(new FileOutputStream(placemarkFile), CHARSET);
                        }
                        // the network link pointing to the file
                        NetworkLinkType networkLinkType = kmlFactory.createNetworkLinkType();
                        LinkType linkType = kmlFactory.createLinkType();
                        if (isHighlighting) {
                            networkLinkType.setName(gmlId + " " + displayFormName + " " + DisplayForm.HIGHLIGTHTED_STR);
                            linkType.setHref(work.getId() + "/" + gmlId + "_" + displayFormName + "_" + DisplayForm.HIGHLIGTHTED_STR + fileExtension);
                        }
                        else { // actual placemark, non-highlighting
                            networkLinkType.setName(gmlId + " " + displayFormName);
                            linkType.setHref(work.getId() + "/" + gmlId + "_" + displayFormName + fileExtension);
                        }
                        linkType.setViewRefreshMode(ViewRefreshModeEnumType.fromValue(config.getProject().getKmlExporter().getViewRefreshMode()));
                        linkType.setViewFormat("");
                        if (linkType.getViewRefreshMode() == ViewRefreshModeEnumType.ON_STOP) {
                            linkType.setViewRefreshTime(config.getProject().getKmlExporter().getViewRefreshTime());
                        }
                        // bind the network link to the object's envelope so clients only
                        // load it when the region becomes visible
                        LatLonAltBoxType latLonAltBoxType = kmlFactory.createLatLonAltBoxType();
                        CityObject4JSON cityObject4JSON = tracker.get(work.getId());
                        if (cityObject4JSON != null) { // avoid NPE when aborting large KML/COLLADA exports
                            latLonAltBoxType.setNorth(cityObject4JSON.getEnvelopeYmax());
                            latLonAltBoxType.setSouth(cityObject4JSON.getEnvelopeYmin());
                            latLonAltBoxType.setEast(cityObject4JSON.getEnvelopeXmax());
                            latLonAltBoxType.setWest(cityObject4JSON.getEnvelopeXmin());
                        }
                        LodType lodType = kmlFactory.createLodType();
                        lodType.setMinLodPixels(config.getProject().getKmlExporter().getSingleObjectRegionSize());
                        if (work.getDisplayForm().getVisibleUpTo() == -1)
                            lodType.setMaxLodPixels(-1.0);
                        else
                            lodType.setMaxLodPixels((double)work.getDisplayForm().getVisibleUpTo() * (lodType.getMinLodPixels()/work.getDisplayForm().getVisibleFrom()));
                        RegionType regionType = kmlFactory.createRegionType();
                        regionType.setLatLonAltBox(latLonAltBoxType);
                        regionType.setLod(lodType);
                        // confusion between atom:link and kml:Link in ogckml22.xsd
                        networkLinkType.getRest().add(kmlFactory.createLink(linkType));
                        networkLinkType.setRegion(regionType);
                        kmlMarshaller.marshal(kmlFactory.createNetworkLink(networkLinkType), buffer);
                    }
                    // per-object files live four levels below the main document,
                    // so the style reference must climb back up
                    placemark.setStyleUrl(".." + File.separator + ".." + File.separator + ".." + File.separator + ".." + File.separator + mainFilename + placemark.getStyleUrl());
                    document.getAbstractFeatureGroup().add(kmlFactory.createPlacemark(placemark));
                }
                else {
                    kmlMarshaller.marshal(kmlFactory.createPlacemark(placemark), buffer);
                }
            }
        }
        // finalize the per-object file, if one was opened
        if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject() && kmlType != null) { // some Placemarks ARE null
            if (config.getProject().getKmlExporter().isExportAsKmz()) {
                kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                zipOut.closeEntry();
                if (balloonInSeparateFile) {
                    for (PlacemarkType placemark: placemarkList) {
                        if (placemark != null) {
                            ZipEntry zipEntry = new ZipEntry(BalloonTemplateHandlerImpl.balloonDirectoryName + "/" + work.getGmlId() + '-' + work.getId() + ".html");
                            if (placemarkDescription != null) {
                                zipOut.putNextEntry(zipEntry);
                                zipOut.write(placemarkDescription.getBytes(CHARSET));
                                zipOut.closeEntry();
                                break; // only once since gmlId is the same for all placemarks
                            }
                        }
                    }
                }
                zipOut.close();
            }
            else {
                kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                fileWriter.close();
            }
        }
        // buffer should not be empty, otherwise cause an error exception in IO Worker
        if (!buffer.isEmpty()) {
            ioWriterPool.addWork(buffer); // placemark or region depending on isOneFilePerObject()
        }
    }
    catch (IOException ioe) {
        ioe.printStackTrace();
    }
}
/**
 * Writes a COLLADA bundle (placemark, model, texture images, balloon) for one city
 * object. In one-file-per-object KMZ mode everything is packaged into a single
 * archive referenced via a region-bound network link; otherwise the model, images
 * and balloon are written as individual files below the working directory and,
 * if configured, converted to glTF via an external converter.
 *
 * @param colladaBundle bundle carrying the placemark, COLLADA model, textures and balloon
 * @param id database id of the city object (used to look up its envelope in the tracker)
 * @param balloonInSeparateFile if true, the balloon HTML is extracted into its own file
 * @throws JAXBException if KML or COLLADA marshalling fails
 * @throws FileNotFoundException if an output file cannot be created
 * @throws IOException if writing to the file system or archive fails
 * @throws SQLException if texture export from the database fails
 */
public void print(ColladaBundle colladaBundle, long id, boolean balloonInSeparateFile) throws JAXBException,
        FileNotFoundException,
        IOException,
        SQLException {
    // NOTE(review): zipOut is dereferenced further below (model/image/balloon saving)
    // outside the try block that assigns it; if the placemark was null or opening the
    // KMZ failed, this looks like a possible NPE — verify against callers.
    ZipOutputStream zipOut = null;
    OutputStreamWriter fileWriter = null;
    SAXEventBuffer buffer = new SAXEventBuffer();
    Marshaller kmlMarshaller = jaxbKmlContext.createMarshaller();
    // stand-alone per-object documents are pretty-printed; otherwise only a fragment
    if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
        kmlMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    }
    else {
        kmlMarshaller.setProperty(Marshaller.JAXB_FRAGMENT, true);
    }
    Marshaller colladaMarshaller = jaxbColladaContext.createMarshaller();
    colladaMarshaller.setProperty(Marshaller.JAXB_ENCODING, ENCODING);
    colladaMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    PlacemarkType placemark = colladaBundle.getPlacemark();
    String path = tracker.getCurrentWorkingDirectoryPath();
    if (placemark != null) {
        String placemarkDescription = placemark.getDescription();
        if (placemarkDescription != null && balloonInSeparateFile) {
            // replace the inline balloon by an iframe referencing the extracted HTML
            StringBuilder parentFrame = new StringBuilder(BalloonTemplateHandlerImpl.parentFrameStart);
            if (isBBoxActive &&
                    config.getProject().getKmlExporter().isOneFilePerObject() &&
                    !config.getProject().getKmlExporter().isExportAsKmz())
                parentFrame.append(".."); // one up
            else
                parentFrame.append("."); // same folder
            parentFrame.append('/').append(BalloonTemplateHandlerImpl.balloonDirectoryName);
            parentFrame.append('/').append(colladaBundle.getGmlId()).append('-').append(colladaBundle.getId());
            parentFrame.append(BalloonTemplateHandlerImpl.parentFrameEnd);
            placemark.setDescription(parentFrame.toString());
            colladaBundle.setExternalBalloonFileContent(placemarkDescription);
        }
        if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
            // the file per object
            KmlType kmlType = kmlFactory.createKmlType();
            DocumentType document = kmlFactory.createDocumentType();
            document.setOpen(true);
            document.setName(colladaBundle.getGmlId());
            kmlType.setAbstractFeatureGroup(kmlFactory.createDocument(document));
            document.getAbstractFeatureGroup().add(kmlFactory.createPlacemark(placemark));
            File placemarkDirectory = new File(path + File.separator + colladaBundle.getId());
            if (!placemarkDirectory.exists()) {
                placemarkDirectory.mkdir();
            }
            String fileExtension = ".kml";
            try {
                if (config.getProject().getKmlExporter().isExportAsKmz()) {
                    fileExtension = ".kmz";
                    File placemarkFile = new File(placemarkDirectory, colladaBundle.getGmlId() + "_collada.kmz");
                    zipOut = new ZipOutputStream(new FileOutputStream(placemarkFile));
                    ZipEntry zipEntry = new ZipEntry("doc.kml");
                    zipOut.putNextEntry(zipEntry);
                    fileWriter = new OutputStreamWriter(zipOut, CHARSET);
                    kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                    // keep zipOut open: model, images and balloon are appended below
                    zipOut.closeEntry();
                }
                else {
                    File placemarkFile = new File(placemarkDirectory, colladaBundle.getGmlId() + "_collada.kml");
                    fileWriter = new OutputStreamWriter(new FileOutputStream(placemarkFile), CHARSET);
                    kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                    fileWriter.close();
                }
            }
            catch (IOException ioe) {
                ioe.printStackTrace();
            }
            // the network link pointing to the file
            NetworkLinkType networkLinkType = kmlFactory.createNetworkLinkType();
            networkLinkType.setName(colladaBundle.getGmlId() + " " + DisplayForm.COLLADA_STR);
            RegionType regionType = kmlFactory.createRegionType();
            LatLonAltBoxType latLonAltBoxType = kmlFactory.createLatLonAltBoxType();
            CityObject4JSON cityObject4JSON = tracker.get(id);
            if (cityObject4JSON != null) { // avoid NPE when aborting large KML/COLLADA exports
                latLonAltBoxType.setNorth(cityObject4JSON.getEnvelopeYmax());
                latLonAltBoxType.setSouth(cityObject4JSON.getEnvelopeYmin());
                latLonAltBoxType.setEast(cityObject4JSON.getEnvelopeXmax());
                latLonAltBoxType.setWest(cityObject4JSON.getEnvelopeXmin());
            }
            LodType lodType = kmlFactory.createLodType();
            lodType.setMinLodPixels(config.getProject().getKmlExporter().getSingleObjectRegionSize());
            regionType.setLatLonAltBox(latLonAltBoxType);
            regionType.setLod(lodType);
            LinkType linkType = kmlFactory.createLinkType();
            linkType.setHref(colladaBundle.getId() + "/" + colladaBundle.getGmlId() + "_" + DisplayForm.COLLADA_STR + fileExtension);
            linkType.setViewRefreshMode(ViewRefreshModeEnumType.fromValue(config.getProject().getKmlExporter().getViewRefreshMode()));
            linkType.setViewFormat("");
            if (linkType.getViewRefreshMode() == ViewRefreshModeEnumType.ON_STOP) {
                linkType.setViewRefreshTime(config.getProject().getKmlExporter().getViewRefreshTime());
            }
            // confusion between atom:link and kml:Link in ogckml22.xsd
            networkLinkType.getRest().add(kmlFactory.createLink(linkType));
            networkLinkType.setRegion(regionType);
            kmlMarshaller.marshal(kmlFactory.createNetworkLink(networkLinkType), buffer);
        }
        else { // !config.getProject().getKmlExporter().isOneFilePerObject()
            kmlMarshaller.marshal(kmlFactory.createPlacemark(placemark), buffer);
        }
        ioWriterPool.addWork(buffer); // placemark or region depending on isOneFilePerObject()
        colladaBundle.setPlacemark(null); // free heap space
    }
    // so much for the placemark, now model, images and balloon...
    if (config.getProject().getKmlExporter().isExportAsKmz() && isBBoxActive
            && config.getProject().getKmlExporter().isOneFilePerObject()) {
        // marshalling in parallel threads should save some time
        StringWriter sw = new StringWriter();
        colladaMarshaller.marshal(colladaBundle.getCollada(), sw);
        colladaBundle.setColladaAsString(sw.toString());
        colladaBundle.setCollada(null); // free heap space
        // ----------------- model saving -----------------
        ZipEntry zipEntry = new ZipEntry(colladaBundle.getId() + "/" + colladaBundle.getGmlId() + ".dae");
        zipOut.putNextEntry(zipEntry);
        zipOut.write(colladaBundle.getColladaAsString().getBytes(CHARSET));
        zipOut.closeEntry();
        // ----------------- image saving -----------------
        if (colladaBundle.getUnsupportedTexImageIds() != null) {
            Set<String> keySet = colladaBundle.getUnsupportedTexImageIds().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                byte[] ordImageBytes = textureExportAdapter.getInByteArray(colladaBundle.getUnsupportedTexImageIds().get(imageFilename), imageFilename);
                zipEntry = imageFilename.startsWith("..") ?
                        new ZipEntry(imageFilename.substring(3)): // skip .. and File.separator
                        new ZipEntry(colladaBundle.getId() + "/" + imageFilename);
                zipOut.putNextEntry(zipEntry);
                zipOut.write(ordImageBytes, 0, ordImageBytes.length);
                zipOut.closeEntry();
            }
        }
        if (colladaBundle.getTexImages() != null) {
            Set<String> keySet = colladaBundle.getTexImages().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                BufferedImage texImage = colladaBundle.getTexImages().get(imageFilename).getBufferedImage();
                // image format is derived from the file extension
                String imageType = imageFilename.substring(imageFilename.lastIndexOf('.') + 1);
                zipEntry = imageFilename.startsWith("..") ?
                        new ZipEntry(imageFilename.substring(3)): // skip .. and File.separator
                        new ZipEntry(colladaBundle.getId() + "/" + imageFilename);
                zipOut.putNextEntry(zipEntry);
                ImageIO.write(texImage, imageType, zipOut);
                zipOut.closeEntry();
            }
        }
        // ----------------- balloon saving -----------------
        if (colladaBundle.getExternalBalloonFileContent() != null) {
            zipEntry = new ZipEntry(BalloonTemplateHandlerImpl.balloonDirectoryName + "/" + colladaBundle.getGmlId() + '-' + colladaBundle.getId() + ".html");
            zipOut.putNextEntry(zipEntry);
            zipOut.write(colladaBundle.getExternalBalloonFileContent().getBytes(CHARSET));
            zipOut.closeEntry();
        }
        zipOut.close();
    }
    else {
        if (config.getProject().getKmlExporter().isExportAsKmz()) {
            // export temporarily as kml, it will be later added to kmz if needed
            File tempFolder = new File(path, TEMP_FOLDER);
            if (!tempFolder.exists()) {
                Logger.getInstance().info("Creating temporary folder...");
                tempFolder.mkdir();
            }
            path = path + File.separator + TEMP_FOLDER;
        }
        // --------------- create subfolder ---------------
        File buildingDirectory = new File(path, String.valueOf(colladaBundle.getId()));
        if (!buildingDirectory.exists()) {
            buildingDirectory.mkdir();
        }
        // ----------------- model saving -----------------
        File colladaModelFile = new File(buildingDirectory, colladaBundle.getGmlId() + ".dae");
        File gltfModelFile = new File(buildingDirectory, colladaBundle.getGmlId() + ".gltf");
        FileOutputStream fos = new FileOutputStream(colladaModelFile);
        colladaMarshaller.marshal(colladaBundle.getCollada(), fos);
        fos.close();
        // ----------------- create glTF without embedded textures-----------------
        if (config.getProject().getKmlExporter().isCreateGltfModel() && !config.getProject().getKmlExporter().isEmbedTexturesInGltfFiles()) {
            convertColladaToglTF(colladaBundle, buildingDirectory, colladaModelFile, gltfModelFile);
        }
        // ----------------- image saving -----------------
        if (colladaBundle.getUnsupportedTexImageIds() != null) {
            Set<String> keySet = colladaBundle.getUnsupportedTexImageIds().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                String fileName = buildingDirectory + File.separator + imageFilename;
                textureExportAdapter.getInFile(colladaBundle.getUnsupportedTexImageIds().get(imageFilename), imageFilename, fileName);
            }
        }
        if (colladaBundle.getTexImages() != null) {
            Set<String> keySet = colladaBundle.getTexImages().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                BufferedImage texImage = colladaBundle.getTexImages().get(imageFilename).getBufferedImage();
                String imageType = imageFilename.substring(imageFilename.lastIndexOf('.') + 1);
                File imageFile = new File(buildingDirectory, imageFilename);
                if (!imageFile.exists()) // avoid overwriting and access conflicts
                    ImageIO.write(texImage, imageType, imageFile);
            }
        }
        // ----------------- create glTF with embedded textures-----------------
        if (config.getProject().getKmlExporter().isCreateGltfModel() && config.getProject().getKmlExporter().isEmbedTexturesInGltfFiles()) {
            convertColladaToglTF(colladaBundle, buildingDirectory, colladaModelFile, gltfModelFile);
            // textures are embedded in the glTF, so the loose image files can go
            // NOTE(review): unlike the loop above, getTexImages() is not null-checked
            // here — possible NPE when no textures were collected; confirm.
            if (config.getProject().getKmlExporter().isNotCreateColladaFiles() && gltfModelFile.exists()) {
                Set<String> keySet = colladaBundle.getTexImages().keySet();
                Iterator<String> iterator = keySet.iterator();
                while (iterator.hasNext()) {
                    String imageFilename = iterator.next();
                    File imageFile = new File(buildingDirectory, imageFilename);
                    if (imageFile.exists())
                        imageFile.delete();
                }
            }
        }
        // ----------------- balloon saving -----------------
        if (colladaBundle.getExternalBalloonFileContent() != null) {
            try {
                File balloonsDirectory = new File(buildingDirectory + File.separator + BalloonTemplateHandlerImpl.balloonDirectoryName);
                if (!balloonsDirectory.exists()) {
                    balloonsDirectory.mkdir();
                }
                File htmlFile = new File(balloonsDirectory, colladaBundle.getGmlId() + '-' + colladaBundle.getId() + ".html");
                FileOutputStream outputStream = new FileOutputStream(htmlFile);
                outputStream.write(colladaBundle.getExternalBalloonFileContent().getBytes(CHARSET));
                outputStream.close();
            }
            catch (IOException ioe) {
                ioe.printStackTrace();
            }
        }
    }
}
/**
 * Converts the COLLADA model of a city object to glTF by invoking the external
 * COLLADA2GLTF converter configured in the exporter settings. Does nothing if the
 * converter binary does not exist. When only glTF output is requested and the
 * conversion produced a file, the intermediate COLLADA file is deleted.
 *
 * @param colladaBundle bundle providing the gml:id that names the .dae input file
 * @param buildingDirectory working directory containing the .dae file (also the
 *        process working directory, so relative texture paths resolve)
 * @param colladaModelFile the intermediate COLLADA file, deleted on success if
 *        COLLADA output is not wanted
 * @param gltfModelFile the expected glTF output file (used for logging and the
 *        success check)
 */
private void convertColladaToglTF(ColladaBundle colladaBundle, File buildingDirectory, File colladaModelFile, File gltfModelFile) {
    String collada2gltfPath = config.getProject().getKmlExporter().getPathOfGltfConverter();
    File collada2gltfFile = new File(collada2gltfPath);
    if (collada2gltfFile.exists()) {
        ProcessBuilder pb = new ProcessBuilder(collada2gltfFile.getAbsolutePath(), "-f", colladaBundle.getGmlId() + ".dae", "-e", "true");
        pb.directory(buildingDirectory);
        try {
            Process process = pb.start();
            process.waitFor();
        } catch (InterruptedException e) {
            // restore the interrupt status so callers can still observe the interruption
            Thread.currentThread().interrupt();
            Logger.getInstance().debug("Unexpected errors occurred while converting collada to glTF for city object '" + colladaBundle.getGmlId() + "' with output path: '" + gltfModelFile.getAbsolutePath() + "'");
        } catch (IOException e) {
            Logger.getInstance().debug("Unexpected errors occurred while converting collada to glTF for city object '" + colladaBundle.getGmlId() + "' with output path: '" + gltfModelFile.getAbsolutePath() + "'");
        }
        finally {
            // drop the intermediate COLLADA file when only glTF output is wanted
            // and the converter actually produced the glTF file
            if (config.getProject().getKmlExporter().isNotCreateColladaFiles() && gltfModelFile.exists()) {
                colladaModelFile.delete();
            }
        }
    }
}
}
| src/org/citydb/modules/kml/database/KmlExporterManager.java | /*
* 3D City Database - The Open Source CityGML Database
* http://www.3dcitydb.org/
*
* (C) 2013 - 2016,
* Chair of Geoinformatics,
* Technische Universitaet Muenchen, Germany
* http://www.gis.bgu.tum.de/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* virtualcitySYSTEMS GmbH, Berlin <http://www.virtualcitysystems.de/>
* M.O.S.S. Computer Grafik Systeme GmbH, Muenchen <http://www.moss.de/>
*
* The 3D City Database Importer/Exporter program is free software:
* you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free
* Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*/
package org.citydb.modules.kml.database;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.imageio.ImageIO;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import net.opengis.kml._2.DocumentType;
import net.opengis.kml._2.KmlType;
import net.opengis.kml._2.LatLonAltBoxType;
import net.opengis.kml._2.LinkType;
import net.opengis.kml._2.LodType;
import net.opengis.kml._2.NetworkLinkType;
import net.opengis.kml._2.ObjectFactory;
import net.opengis.kml._2.PlacemarkType;
import net.opengis.kml._2.RegionType;
import net.opengis.kml._2.ViewRefreshModeEnumType;
import org.citydb.api.concurrent.WorkerPool;
import org.citydb.api.event.EventDispatcher;
import org.citydb.api.log.LogLevel;
import org.citydb.config.Config;
import org.citydb.config.project.kmlExporter.DisplayForm;
import org.citydb.database.adapter.BlobExportAdapter;
import org.citydb.log.Logger;
import org.citydb.modules.common.balloon.BalloonTemplateHandlerImpl;
import org.citydb.modules.common.event.CounterEvent;
import org.citydb.modules.common.event.CounterType;
import org.citydb.modules.kml.util.CityObject4JSON;
import org.citydb.modules.kml.util.ExportTracker;
import org.citygml4j.model.citygml.CityGMLClass;
import org.citygml4j.util.xml.SAXEventBuffer;
public class KmlExporterManager {
// JAXB contexts used to create marshallers for KML and COLLADA output
private final JAXBContext jaxbKmlContext;
private final JAXBContext jaxbColladaContext;
// pool of I/O workers that asynchronously write buffered SAX events to the main file
private final WorkerPool<SAXEventBuffer> ioWriterPool;
// shared tracker for per-feature JSON metadata and the current working directory
private final ExportTracker tracker;
// factory for KML 2.2 schema objects
private final ObjectFactory kmlFactory;
// database adapter used to export texture images stored as BLOBs
private final BlobExportAdapter textureExportAdapter;
// dispatcher used to publish feature counter events
private final EventDispatcher eventDispatcher;
private final Config config;
// whether tiled bounding-box filtering is active (set once in the constructor)
private boolean isBBoxActive;
// file name of the main KML document, derived from the export file name
private String mainFilename;
// number of exported top-level features per CityGML class
private HashMap<CityGMLClass, Long> featureCounterMap;
private final String ENCODING = "UTF-8";
private final Charset CHARSET = Charset.forName(ENCODING);
// subfolder used for temporary KML output before KMZ packaging
private final String TEMP_FOLDER = "__temp";
/**
 * Creates a new export manager and derives the name of the main KML document
 * from the configured export file name: the directory part (if any) and the
 * file extension (if any) are stripped, then ".kml" is appended.
 *
 * @param jaxbKmlContext JAXB context for KML marshalling
 * @param jaxbColladaContext JAXB context for COLLADA marshalling
 * @param ioWriterPool worker pool receiving buffered SAX events for the main file
 * @param tracker shared export tracker (per-feature JSON, working directory)
 * @param kmlFactory factory for KML 2.2 schema objects
 * @param textureExportAdapter adapter for exporting texture BLOBs from the database
 * @param eventDispatcher dispatcher for counter events
 * @param config global exporter configuration
 */
public KmlExporterManager(JAXBContext jaxbKmlContext,
        JAXBContext jaxbColladaContext,
        WorkerPool<SAXEventBuffer> ioWriterPool,
        ExportTracker tracker,
        ObjectFactory kmlFactory,
        BlobExportAdapter textureExportAdapter,
        EventDispatcher eventDispatcher,
        Config config) {
    this.jaxbKmlContext = jaxbKmlContext;
    this.jaxbColladaContext = jaxbColladaContext;
    this.ioWriterPool = ioWriterPool;
    this.tracker = tracker;
    this.kmlFactory = kmlFactory;
    this.textureExportAdapter = textureExportAdapter;
    this.eventDispatcher = eventDispatcher;
    this.config = config;
    isBBoxActive = config.getProject().getKmlExporter().getFilter().getComplexFilter().getTiledBoundingBox().getActive().booleanValue();
    mainFilename = config.getInternal().getExportFileName().trim();
    // strip the directory prefix and/or the extension from the configured name
    if (mainFilename.lastIndexOf(File.separator) != -1) {
        if (mainFilename.lastIndexOf(".") == -1) {
            mainFilename = mainFilename.substring(mainFilename.lastIndexOf(File.separator) + 1);
        }
        else {
            mainFilename = mainFilename.substring(mainFilename.lastIndexOf(File.separator) + 1, mainFilename.lastIndexOf("."));
        }
    }
    else {
        if (mainFilename.lastIndexOf(".") != -1) {
            mainFilename = mainFilename.substring(0, mainFilename.lastIndexOf("."));
        }
    }
    mainFilename = mainFilename + ".kml";
    featureCounterMap = new HashMap<CityGMLClass, Long>();
}
/**
 * Registers one exported top-level feature: increments the per-class counter,
 * stores the feature's JSON metadata in the shared tracker, and publishes a
 * counter event for progress reporting.
 *
 * @param work splitter result describing the exported city object
 */
public void updateFeatureTracker(KmlSplittingResult work) {
    Long counter = featureCounterMap.get(work.getCityObjectType());
    if (counter == null)
        // Long.valueOf uses the boxed-value cache instead of allocating a fresh
        // instance (new Long(1) is wasteful and deprecated in later Java releases)
        featureCounterMap.put(work.getCityObjectType(), Long.valueOf(1));
    else
        featureCounterMap.put(work.getCityObjectType(), counter + 1);
    tracker.put(work.getId(), work.getJson());
    eventDispatcher.triggerEvent(new CounterEvent(CounterType.TOPLEVEL_FEATURE, 1, this));
}
/**
 * Returns the per-class counter of top-level features exported so far.
 * <p>
 * Note: this is the live internal map, not a defensive copy.
 *
 * @return mapping from CityGML class to the number of exported features
 */
public HashMap<CityGMLClass, Long> getFeatureCounter() {
    return this.featureCounterMap;
}
/**
 * Provides access to the export tracker shared by the exporter workers.
 *
 * @return the tracker holding per-feature metadata and the current working directory
 */
public ExportTracker getExportTracker() {
    return tracker;
}
/**
 * Writes a list of placemarks that all belong to the same city object (same gml:id)
 * to the KML output. Depending on the configuration the placemarks are either
 * streamed into the main document via the I/O writer pool, or written into a
 * separate per-object file (optionally packaged as KMZ) that is referenced from
 * the main document through a region-bound network link.
 *
 * @param placemarkList placemarks of one city object; entries may be null and are skipped
 * @param work splitter result describing the city object being exported
 * @param balloonInSeparateFile if true, the balloon HTML is extracted into its own file
 * @throws JAXBException if marshalling of KML elements fails
 */
public void print(List<PlacemarkType> placemarkList,
        KmlSplittingResult work,
        boolean balloonInSeparateFile) throws JAXBException {
    SAXEventBuffer buffer = new SAXEventBuffer();
    Marshaller kmlMarshaller = jaxbKmlContext.createMarshaller();
    // one-file-per-object output is a stand-alone document and gets pretty-printed;
    // otherwise only an XML fragment is produced (no XML declaration)
    if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
        kmlMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    }
    else {
        kmlMarshaller.setProperty(Marshaller.JAXB_FRAGMENT, true);
    }
    // all placemarks in this list belong together (same gmlid),
    // so the balloon must be extracted only once.
    boolean balloonExtracted = false;
    String gmlId = null;
    String placemarkDescription = null;
    KmlType kmlType = null;
    DocumentType document = null;
    // NOTE(review): zipOut/fileWriter are not closed on exception paths below
    // (no finally); a failure mid-write leaks the stream — consider try-with-resources.
    ZipOutputStream zipOut = null;
    OutputStreamWriter fileWriter = null;
    String path = tracker.getCurrentWorkingDirectoryPath();
    File directory = new File(path);
    try {
        for (PlacemarkType placemark: placemarkList) {
            if (placemark != null) {
                String displayFormName = work.getDisplayForm().getName();
                if (placemark.getDescription() != null && balloonInSeparateFile) {
                    // replace the inline balloon content by an iframe pointing to the
                    // extracted HTML file in the balloon subfolder
                    StringBuilder parentFrame = new StringBuilder(BalloonTemplateHandlerImpl.parentFrameStart);
                    parentFrame.append('.'); // same folder
                    parentFrame.append('/').append(BalloonTemplateHandlerImpl.balloonDirectoryName);
                    parentFrame.append('/').append(work.getGmlId()).append('-').append(work.getId());
                    parentFrame.append(BalloonTemplateHandlerImpl.parentFrameEnd);
                    if (!balloonExtracted) {
                        placemarkDescription = placemark.getDescription();
                        // --------------- create subfolder ---------------
                        if (config.getProject().getKmlExporter().isExportAsKmz()) {
                            if (!isBBoxActive || !config.getProject().getKmlExporter().isOneFilePerObject()) {
                                // export temporarily as kml, it will be later added to kmz if needed
                                directory = new File(path, TEMP_FOLDER);
                                if (!directory.exists()) {
                                    Logger.getInstance().info("Creating temporary folder...");
                                    directory.mkdir();
                                }
                            }
                        }
                        else { // export as kml
                            if (config.getProject().getKmlExporter().isOneFilePerObject()) {
                                directory = new File(path, String.valueOf(work.getId()));
                                if (!directory.exists()) {
                                    directory.mkdir();
                                }
                            }
                        }
                        // write the balloon HTML to disk unless it will be embedded
                        // into the per-object KMZ archive later on
                        if (!isBBoxActive || !config.getProject().getKmlExporter().isOneFilePerObject() || !config.getProject().getKmlExporter().isExportAsKmz()) {
                            try {
                                File balloonsDirectory = new File(directory, BalloonTemplateHandlerImpl.balloonDirectoryName);
                                if (!balloonsDirectory.exists()) {
                                    balloonsDirectory.mkdir();
                                }
                                File htmlFile = new File(balloonsDirectory, work.getGmlId() + '-' + work.getId() + ".html");
                                FileOutputStream outputStream = new FileOutputStream(htmlFile);
                                outputStream.write(placemarkDescription.getBytes(CHARSET));
                                outputStream.close();
                            }
                            catch (IOException ioe) {
                                ioe.printStackTrace();
                            }
                        }
                        balloonExtracted = true;
                    }
                    placemark.setDescription(parentFrame.toString());
                }
                if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
                    // lazily set up the per-object file and its network link on the
                    // first non-null placemark (all share the same gml:id)
                    if (gmlId == null) {
                        gmlId = work.getGmlId();
                        boolean isHighlighting = false;
                        String filename = gmlId + "_" + displayFormName;
                        if (placemark.getId().startsWith(config.getProject().getKmlExporter().getIdPrefixes().getPlacemarkHighlight())) {
                            filename = filename + "_" + DisplayForm.HIGHLIGTHTED_STR;
                            isHighlighting = true;
                        }
                        File placemarkDirectory = new File(path + File.separator + work.getId());
                        if (!placemarkDirectory.exists()) {
                            placemarkDirectory.mkdir();
                        }
                        // create kml root element
                        kmlType = kmlFactory.createKmlType();
                        document = kmlFactory.createDocumentType();
                        document.setOpen(true);
                        document.setName(filename);
                        kmlType.setAbstractFeatureGroup(kmlFactory.createDocument(document));
                        String fileExtension = ".kml";
                        if (config.getProject().getKmlExporter().isExportAsKmz()) {
                            fileExtension = ".kmz";
                            File placemarkFile = new File(placemarkDirectory, filename + ".kmz");
                            zipOut = new ZipOutputStream(new FileOutputStream(placemarkFile));
                            ZipEntry zipEntry = new ZipEntry("doc.kml");
                            zipOut.putNextEntry(zipEntry);
                            fileWriter = new OutputStreamWriter(zipOut, CHARSET);
                        }
                        else {
                            File placemarkFile = new File(placemarkDirectory, filename + ".kml");
                            fileWriter = new OutputStreamWriter(new FileOutputStream(placemarkFile), CHARSET);
                        }
                        // the network link pointing to the file
                        NetworkLinkType networkLinkType = kmlFactory.createNetworkLinkType();
                        LinkType linkType = kmlFactory.createLinkType();
                        if (isHighlighting) {
                            networkLinkType.setName(gmlId + " " + displayFormName + " " + DisplayForm.HIGHLIGTHTED_STR);
                            linkType.setHref(work.getId() + "/" + gmlId + "_" + displayFormName + "_" + DisplayForm.HIGHLIGTHTED_STR + fileExtension);
                        }
                        else { // actual placemark, non-highlighting
                            networkLinkType.setName(gmlId + " " + displayFormName);
                            linkType.setHref(work.getId() + "/" + gmlId + "_" + displayFormName + fileExtension);
                        }
                        linkType.setViewRefreshMode(ViewRefreshModeEnumType.fromValue(config.getProject().getKmlExporter().getViewRefreshMode()));
                        linkType.setViewFormat("");
                        if (linkType.getViewRefreshMode() == ViewRefreshModeEnumType.ON_STOP) {
                            linkType.setViewRefreshTime(config.getProject().getKmlExporter().getViewRefreshTime());
                        }
                        // bind the network link to the object's envelope so clients only
                        // load it when the region becomes visible
                        LatLonAltBoxType latLonAltBoxType = kmlFactory.createLatLonAltBoxType();
                        CityObject4JSON cityObject4JSON = tracker.get(work.getId());
                        if (cityObject4JSON != null) { // avoid NPE when aborting large KML/COLLADA exports
                            latLonAltBoxType.setNorth(cityObject4JSON.getEnvelopeYmax());
                            latLonAltBoxType.setSouth(cityObject4JSON.getEnvelopeYmin());
                            latLonAltBoxType.setEast(cityObject4JSON.getEnvelopeXmax());
                            latLonAltBoxType.setWest(cityObject4JSON.getEnvelopeXmin());
                        }
                        LodType lodType = kmlFactory.createLodType();
                        lodType.setMinLodPixels(config.getProject().getKmlExporter().getSingleObjectRegionSize());
                        if (work.getDisplayForm().getVisibleUpTo() == -1)
                            lodType.setMaxLodPixels(-1.0);
                        else
                            lodType.setMaxLodPixels((double)work.getDisplayForm().getVisibleUpTo() * (lodType.getMinLodPixels()/work.getDisplayForm().getVisibleFrom()));
                        RegionType regionType = kmlFactory.createRegionType();
                        regionType.setLatLonAltBox(latLonAltBoxType);
                        regionType.setLod(lodType);
                        // confusion between atom:link and kml:Link in ogckml22.xsd
                        networkLinkType.getRest().add(kmlFactory.createLink(linkType));
                        networkLinkType.setRegion(regionType);
                        kmlMarshaller.marshal(kmlFactory.createNetworkLink(networkLinkType), buffer);
                    }
                    // per-object files live four levels below the main document,
                    // so the style reference must climb back up
                    placemark.setStyleUrl(".." + File.separator + ".." + File.separator + ".." + File.separator + ".." + File.separator + mainFilename + placemark.getStyleUrl());
                    document.getAbstractFeatureGroup().add(kmlFactory.createPlacemark(placemark));
                }
                else {
                    kmlMarshaller.marshal(kmlFactory.createPlacemark(placemark), buffer);
                }
            }
        }
        // finalize the per-object file, if one was opened
        if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject() && kmlType != null) { // some Placemarks ARE null
            if (config.getProject().getKmlExporter().isExportAsKmz()) {
                kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                zipOut.closeEntry();
                if (balloonInSeparateFile) {
                    for (PlacemarkType placemark: placemarkList) {
                        if (placemark != null) {
                            ZipEntry zipEntry = new ZipEntry(BalloonTemplateHandlerImpl.balloonDirectoryName + "/" + work.getGmlId() + '-' + work.getId() + ".html");
                            if (placemarkDescription != null) {
                                zipOut.putNextEntry(zipEntry);
                                zipOut.write(placemarkDescription.getBytes(CHARSET));
                                zipOut.closeEntry();
                                break; // only once since gmlId is the same for all placemarks
                            }
                        }
                    }
                }
                zipOut.close();
            }
            else {
                kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                fileWriter.close();
            }
        }
        // buffer should not be empty, otherwise cause an error exception in IO Worker
        if (!buffer.isEmpty()) {
            ioWriterPool.addWork(buffer); // placemark or region depending on isOneFilePerObject()
        }
    }
    catch (IOException ioe) {
        ioe.printStackTrace();
    }
}
/**
 * Writes one exported city object: its KML placemark (either inline into the shared
 * SAX buffer or as a per-object file/network link), the COLLADA model, its texture
 * images and, optionally, an external balloon HTML file. Output goes either into a
 * per-object KMZ archive or into a per-object directory on disk, depending on the
 * exporter configuration.
 *
 * @param colladaBundle          model, textures and balloon content of one object
 * @param id                     object id used for the output sub-directory and region lookup
 * @param balloonInSeparateFile  if true, the balloon HTML is written to a separate file
 *                               and the placemark description is replaced by an iframe stub
 */
public void print(ColladaBundle colladaBundle, long id, boolean balloonInSeparateFile) throws JAXBException,
                                                                                              FileNotFoundException,
                                                                                              IOException,
                                                                                              SQLException {
    ZipOutputStream zipOut = null;
    OutputStreamWriter fileWriter = null;
    SAXEventBuffer buffer = new SAXEventBuffer();

    // Per-object files get a standalone, pretty-printed document; otherwise marshal
    // as a fragment (no XML declaration) so it can be appended to the main document.
    Marshaller kmlMarshaller = jaxbKmlContext.createMarshaller();
    if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
        kmlMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    }
    else {
        kmlMarshaller.setProperty(Marshaller.JAXB_FRAGMENT, true);
    }

    Marshaller colladaMarshaller = jaxbColladaContext.createMarshaller();
    colladaMarshaller.setProperty(Marshaller.JAXB_ENCODING, ENCODING);
    colladaMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);

    PlacemarkType placemark = colladaBundle.getPlacemark();
    String path = tracker.getCurrentWorkingDirectoryPath();

    if (placemark != null) {
        String placemarkDescription = placemark.getDescription();

        // Move the balloon content out of the placemark and replace it with a
        // parent-frame stub that references the external balloon HTML file.
        if (placemarkDescription != null && balloonInSeparateFile) {
            StringBuilder parentFrame = new StringBuilder(BalloonTemplateHandlerImpl.parentFrameStart);
            if (isBBoxActive &&
                    config.getProject().getKmlExporter().isOneFilePerObject() &&
                    !config.getProject().getKmlExporter().isExportAsKmz())
                parentFrame.append(".."); // one up
            else
                parentFrame.append("."); // same folder
            parentFrame.append('/').append(BalloonTemplateHandlerImpl.balloonDirectoryName);
            parentFrame.append('/').append(colladaBundle.getGmlId()).append('-').append(colladaBundle.getId());
            parentFrame.append(BalloonTemplateHandlerImpl.parentFrameEnd);
            placemark.setDescription(parentFrame.toString());
            colladaBundle.setExternalBalloonFileContent(placemarkDescription);
        }
        if (isBBoxActive && config.getProject().getKmlExporter().isOneFilePerObject()) {
            // the file per object
            KmlType kmlType = kmlFactory.createKmlType();
            DocumentType document = kmlFactory.createDocumentType();
            document.setOpen(true);
            document.setName(colladaBundle.getGmlId());
            kmlType.setAbstractFeatureGroup(kmlFactory.createDocument(document));
            document.getAbstractFeatureGroup().add(kmlFactory.createPlacemark(placemark));

            File placemarkDirectory = new File(path + File.separator + colladaBundle.getId());
            if (!placemarkDirectory.exists()) {
                placemarkDirectory.mkdir();
            }

            String fileExtension = ".kml";
            try {
                if (config.getProject().getKmlExporter().isExportAsKmz()) {
                    fileExtension = ".kmz";
                    File placemarkFile = new File(placemarkDirectory, colladaBundle.getGmlId() + "_collada.kmz");
                    // NOTE(review): if this FileOutputStream throws, zipOut stays null and the
                    // kmz branch further below will NPE on zipOut.putNextEntry — confirm intended.
                    zipOut = new ZipOutputStream(new FileOutputStream(placemarkFile));
                    ZipEntry zipEntry = new ZipEntry("doc.kml");
                    zipOut.putNextEntry(zipEntry);
                    // NOTE(review): fileWriter wraps zipOut and is never closed here; relies on
                    // the marshaller flushing the writer before closeEntry() — confirm.
                    fileWriter = new OutputStreamWriter(zipOut, CHARSET);
                    kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                    zipOut.closeEntry();
                }
                else {
                    File placemarkFile = new File(placemarkDirectory, colladaBundle.getGmlId() + "_collada.kml");
                    fileWriter = new OutputStreamWriter(new FileOutputStream(placemarkFile), CHARSET);
                    kmlMarshaller.marshal(kmlFactory.createKml(kmlType), fileWriter);
                    fileWriter.close();
                }
            }
            catch (IOException ioe) {
                ioe.printStackTrace();
            }

            // the network link pointing to the file
            NetworkLinkType networkLinkType = kmlFactory.createNetworkLinkType();
            networkLinkType.setName(colladaBundle.getGmlId() + " " + DisplayForm.COLLADA_STR);

            RegionType regionType = kmlFactory.createRegionType();

            LatLonAltBoxType latLonAltBoxType = kmlFactory.createLatLonAltBoxType();
            CityObject4JSON cityObject4JSON = tracker.get(id);
            if (cityObject4JSON != null) { // avoid NPE when aborting large KML/COLLADA exports
                latLonAltBoxType.setNorth(cityObject4JSON.getEnvelopeYmax());
                latLonAltBoxType.setSouth(cityObject4JSON.getEnvelopeYmin());
                latLonAltBoxType.setEast(cityObject4JSON.getEnvelopeXmax());
                latLonAltBoxType.setWest(cityObject4JSON.getEnvelopeXmin());
            }

            LodType lodType = kmlFactory.createLodType();
            lodType.setMinLodPixels(config.getProject().getKmlExporter().getSingleObjectRegionSize());

            regionType.setLatLonAltBox(latLonAltBoxType);
            regionType.setLod(lodType);

            LinkType linkType = kmlFactory.createLinkType();
            // NOTE(review): the href assumes DisplayForm.COLLADA_STR equals the literal
            // "collada" used in the file names above — confirm they stay in sync.
            linkType.setHref(colladaBundle.getId() + "/" + colladaBundle.getGmlId() + "_" + DisplayForm.COLLADA_STR + fileExtension);
            linkType.setViewRefreshMode(ViewRefreshModeEnumType.fromValue(config.getProject().getKmlExporter().getViewRefreshMode()));
            linkType.setViewFormat("");
            if (linkType.getViewRefreshMode() == ViewRefreshModeEnumType.ON_STOP) {
                linkType.setViewRefreshTime(config.getProject().getKmlExporter().getViewRefreshTime());
            }

            // confusion between atom:link and kml:Link in ogckml22.xsd
            networkLinkType.getRest().add(kmlFactory.createLink(linkType));
            networkLinkType.setRegion(regionType);
            kmlMarshaller.marshal(kmlFactory.createNetworkLink(networkLinkType), buffer);
        }
        else { // !config.getProject().getKmlExporter().isOneFilePerObject()
            kmlMarshaller.marshal(kmlFactory.createPlacemark(placemark), buffer);
        }

        ioWriterPool.addWork(buffer); // placemark or region depending on isOneFilePerObject()
        colladaBundle.setPlacemark(null); // free heap space
    }

    // so much for the placemark, now model, images and balloon...
    if (config.getProject().getKmlExporter().isExportAsKmz() && isBBoxActive
            && config.getProject().getKmlExporter().isOneFilePerObject()) {

        // marshalling in parallel threads should save some time
        StringWriter sw = new StringWriter();
        colladaMarshaller.marshal(colladaBundle.getCollada(), sw);
        colladaBundle.setColladaAsString(sw.toString());
        colladaBundle.setCollada(null); // free heap space

        // ----------------- model saving -----------------
        ZipEntry zipEntry = new ZipEntry(colladaBundle.getId() + "/" + colladaBundle.getGmlId() + ".dae");
        zipOut.putNextEntry(zipEntry);
        zipOut.write(colladaBundle.getColladaAsString().getBytes(CHARSET));
        zipOut.closeEntry();

        // ----------------- image saving -----------------
        // Unsupported texture formats are copied through as raw bytes.
        if (colladaBundle.getUnsupportedTexImageIds() != null) {
            Set<String> keySet = colladaBundle.getUnsupportedTexImageIds().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                byte[] ordImageBytes = textureExportAdapter.getInByteArray(colladaBundle.getUnsupportedTexImageIds().get(imageFilename), imageFilename);
                zipEntry = imageFilename.startsWith("..") ?
                        new ZipEntry(imageFilename.substring(3)): // skip .. and File.separator
                        new ZipEntry(colladaBundle.getId() + "/" + imageFilename);
                zipOut.putNextEntry(zipEntry);
                zipOut.write(ordImageBytes, 0, ordImageBytes.length);
                zipOut.closeEntry();
            }
        }

        // Supported textures are re-encoded via ImageIO using the file extension as format.
        if (colladaBundle.getTexImages() != null) {
            Set<String> keySet = colladaBundle.getTexImages().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                BufferedImage texImage = colladaBundle.getTexImages().get(imageFilename).getBufferedImage();
                String imageType = imageFilename.substring(imageFilename.lastIndexOf('.') + 1);

                zipEntry = imageFilename.startsWith("..") ?
                        new ZipEntry(imageFilename.substring(3)): // skip .. and File.separator
                        new ZipEntry(colladaBundle.getId() + "/" + imageFilename);
                zipOut.putNextEntry(zipEntry);
                ImageIO.write(texImage, imageType, zipOut);
                zipOut.closeEntry();
            }
        }

        // ----------------- balloon saving -----------------
        if (colladaBundle.getExternalBalloonFileContent() != null) {
            zipEntry = new ZipEntry(BalloonTemplateHandlerImpl.balloonDirectoryName + "/" + colladaBundle.getGmlId() + '-' + colladaBundle.getId() + ".html");
            zipOut.putNextEntry(zipEntry);
            zipOut.write(colladaBundle.getExternalBalloonFileContent().getBytes(CHARSET));
            zipOut.closeEntry();
        }

        zipOut.close();
    }
    else {
        if (config.getProject().getKmlExporter().isExportAsKmz()) {
            // export temporarily as kml, it will be later added to kmz if needed
            File tempFolder = new File(path, TEMP_FOLDER);
            if (!tempFolder.exists()) {
                Logger.getInstance().info("Creating temporary folder...");
                tempFolder.mkdir();
            }
            path = path + File.separator + TEMP_FOLDER;
        }

        // --------------- create subfolder ---------------
        File buildingDirectory = new File(path, String.valueOf(colladaBundle.getId()));
        if (!buildingDirectory.exists()) {
            buildingDirectory.mkdir();
        }

        // ----------------- model saving -----------------
        File buildingModelFile = new File(buildingDirectory, colladaBundle.getGmlId() + ".dae");
        FileOutputStream fos = new FileOutputStream(buildingModelFile);
        colladaMarshaller.marshal(colladaBundle.getCollada(), fos);
        fos.close();

        // ----------------- create glTF without embedded textures-----------------
        // Must run before the textures are written so the converter sees only the .dae.
        if (config.getProject().getKmlExporter().isCreateGltfModel() && !config.getProject().getKmlExporter().isEmbedTexturesInGltfFiles()) {
            convertColladaToglTF(colladaBundle, buildingDirectory, buildingModelFile);
        }

        // ----------------- image saving -----------------
        if (colladaBundle.getUnsupportedTexImageIds() != null) {
            Set<String> keySet = colladaBundle.getUnsupportedTexImageIds().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                String fileName = buildingDirectory + File.separator + imageFilename;
                textureExportAdapter.getInFile(colladaBundle.getUnsupportedTexImageIds().get(imageFilename), imageFilename, fileName);
            }
        }

        if (colladaBundle.getTexImages() != null) {
            Set<String> keySet = colladaBundle.getTexImages().keySet();
            Iterator<String> iterator = keySet.iterator();
            while (iterator.hasNext()) {
                String imageFilename = iterator.next();
                BufferedImage texImage = colladaBundle.getTexImages().get(imageFilename).getBufferedImage();
                String imageType = imageFilename.substring(imageFilename.lastIndexOf('.') + 1);

                File imageFile = new File(buildingDirectory, imageFilename);
                if (!imageFile.exists()) // avoid overwriting and access conflicts
                    ImageIO.write(texImage, imageType, imageFile);
            }
        }

        // ----------------- create glTF with embedded textures-----------------
        // Runs after texture export; the converter embeds them, after which the
        // standalone image files may be deleted when COLLADA output is disabled.
        if (config.getProject().getKmlExporter().isCreateGltfModel() && config.getProject().getKmlExporter().isEmbedTexturesInGltfFiles()) {
            convertColladaToglTF(colladaBundle, buildingDirectory, buildingModelFile);
            if (config.getProject().getKmlExporter().isNotCreateColladaFiles()) {
                Set<String> keySet = colladaBundle.getTexImages().keySet();
                Iterator<String> iterator = keySet.iterator();
                while (iterator.hasNext()) {
                    String imageFilename = iterator.next();
                    File imageFile = new File(buildingDirectory, imageFilename);
                    if (imageFile.exists())
                        imageFile.delete();
                }
            }
        }

        // ----------------- balloon saving -----------------
        if (colladaBundle.getExternalBalloonFileContent() != null) {
            try {
                File balloonsDirectory = new File(buildingDirectory + File.separator + BalloonTemplateHandlerImpl.balloonDirectoryName);
                if (!balloonsDirectory.exists()) {
                    balloonsDirectory.mkdir();
                }
                File htmlFile = new File(balloonsDirectory, colladaBundle.getGmlId() + '-' + colladaBundle.getId() + ".html");
                FileOutputStream outputStream = new FileOutputStream(htmlFile);
                outputStream.write(colladaBundle.getExternalBalloonFileContent().getBytes(CHARSET));
                outputStream.close();
            }
            catch (IOException ioe) {
                ioe.printStackTrace();
            }
        }
    }
}
/**
 * Runs the external COLLADA-to-glTF converter (if configured and present) on the
 * exported .dae model inside {@code buildingDirectory}, then deletes the COLLADA
 * file when the configuration says COLLADA output itself is not wanted.
 *
 * @param colladaBundle      provides the gml id used as the .dae file name
 * @param buildingDirectory  working directory for the converter process
 * @param buildingModelFile  the exported .dae file, possibly deleted afterwards
 */
private void convertColladaToglTF(ColladaBundle colladaBundle, File buildingDirectory, File buildingModelFile) {
    String collada2gltfPath = config.getProject().getKmlExporter().getPathOfGltfConverter();
    File collada2gltfFile = new File(collada2gltfPath);
    if (collada2gltfFile.exists()) {
        // -e true: embed resources into the generated glTF file
        ProcessBuilder pb = new ProcessBuilder(collada2gltfFile.getAbsolutePath(), "-f", colladaBundle.getGmlId() + ".dae", "-e", "true");
        pb.directory(buildingDirectory);
        try {
            Process process = pb.start();
            process.waitFor();
        } catch (InterruptedException e) {
            // Do not swallow the interrupt: restore the flag so callers can react.
            Thread.currentThread().interrupt();
        } catch (IOException e) {
            // Previously silently swallowed; at least report the converter failure.
            e.printStackTrace();
        }
    }
    // Drop the .dae even if the converter was missing/failed, matching the
    // original behavior of honoring isNotCreateColladaFiles unconditionally.
    if (config.getProject().getKmlExporter().isNotCreateColladaFiles()) {
        buildingModelFile.delete();
    }
}
}
| [KML/COLLADA/glTF] fixed issue resulting in missing glTF model when
deactivating COLLADA-Export | src/org/citydb/modules/kml/database/KmlExporterManager.java | [KML/COLLADA/glTF] fixed issue resulting in missing glTF model when deactivating COLLADA-Export |
|
Java | apache-2.0 | 46ab8da28e552182d9ffe8ae206a9b13acda2611 | 0 | christoflemke/xml.entity | /*
* Copyright 2013 Christof Lemke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package xml.entity.immutableelement;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.Immutable;
import javax.inject.Inject;
import xml.entity.select.DefaultSelector;
import xml.entity.select.PathParser;
import xml.entity.select.Selector;
import com.google.common.collect.ImmutableList;
/**
 * Factory for the three kinds of {@link ImmutableElement}: internal nodes,
 * attributes and text nodes. All elements created by one factory share the
 * same {@link Selector}. Instances are immutable and therefore thread-safe.
 */
@Immutable
public class ImmutableElementFactory
{
    /** Selector handed to every element this factory creates. */
    private final Selector selector;

    @Inject
    public ImmutableElementFactory(@Nonnull final Selector selector)
    {
        this.selector = selector;
    }

    private ImmutableElementFactory()
    {
        this.selector = new DefaultSelector(PathParser.create(), this);
    }

    /**
     * Create an instance of the default implementation
     *
     * @return A new factory
     */
    public static ImmutableElementFactory create()
    {
        return new ImmutableElementFactory();
    }

    /**
     * Create an internal node.
     *
     * @param name
     *            The element name.
     * @param children
     *            The children of the new node.
     * @return A node
     */
    public ImmutableElement createNode(
            @Nonnull final String name,
            @Nonnull final ImmutableList<ImmutableElement> children)
    {
        return new InternalElement(name, children, this.selector);
    }

    /**
     * Create an attribute node
     *
     * @param name
     *            The name of the attribute
     * @param value
     *            The value of the attribute
     * @return A new attribute node
     */
    public ImmutableElement createAttr(
            @Nonnull final String name,
            @Nonnull final String value)
    {
        return new Attribute(name, value, this.selector);
    }

    /**
     * Create a text node.
     *
     * @param value
     *            The text content of this node
     * @return A text node
     */
    public ImmutableElement createText(@Nonnull final String value)
    {
        return new Text(value, this.selector);
    }

    /**
     * Create a node without children
     *
     * @param name
     *            The element name
     * @return A node
     */
    public ImmutableElement createLeaf(@Nonnull final String name)
    {
        // Delegate to createNode so node construction lives in exactly one place.
        return createNode(name, ImmutableList.<ImmutableElement>of());
    }
}
| src/xml/entity/immutableelement/ImmutableElementFactory.java | /*
* Copyright 2013 Christof Lemke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package xml.entity.immutableelement;
import javax.inject.Inject;
import xml.entity.select.DefaultSelector;
import xml.entity.select.PathParser;
import xml.entity.select.Selector;
import com.google.common.collect.ImmutableList;
/**
 * Factory for the different kinds of {@link ImmutableElement}: internal nodes,
 * attributes and text nodes. Every element created by one factory shares the
 * same {@link Selector}.
 */
public class ImmutableElementFactory
{
    // Selector handed to every element this factory creates.
    private final Selector selector;

    @Inject
    public ImmutableElementFactory(final Selector selector)
    {
        this.selector = selector;
    }

    private ImmutableElementFactory()
    {
        this.selector = new DefaultSelector(PathParser.create(), this);
    }

    /**
     * Create an instance of the default implementation.
     *
     * @return A new factory
     */
    public static ImmutableElementFactory create()
    {
        return new ImmutableElementFactory();
    }

    /**
     * Create an internal node with the given children.
     *
     * @param name the element name
     * @param children the children of the new node
     * @return a node
     */
    public ImmutableElement createNode(final String name, final ImmutableList<ImmutableElement> children)
    {
        return new InternalElement(name, children, this.selector);
    }

    /**
     * Create an attribute node.
     *
     * @param name the attribute name
     * @param value the attribute value
     * @return an attribute node
     */
    public ImmutableElement createAttr(final String name, final String value)
    {
        return new Attribute(name, value, this.selector);
    }

    /**
     * Create a text node.
     *
     * @param value the text content of the node
     * @return a text node
     */
    public ImmutableElement createText(final String value)
    {
        return new Text(value, this.selector);
    }

    /**
     * Create a node without children.
     *
     * @param name the element name
     * @return a node
     */
    public ImmutableElement createLeaf(final String name)
    {
        final ImmutableList<ImmutableElement> of = ImmutableList.of();
        return new InternalElement(name, of, this.selector);
    }
}
| javadoc | src/xml/entity/immutableelement/ImmutableElementFactory.java | javadoc |
|
Java | apache-2.0 | f7092c1a8e2849b8f1a5276a533a60a38753831e | 0 | aika-algorithm/aika,aika-algorithm/aika | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package network.aika.neuron.activation;
import network.aika.Document;
import network.aika.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static network.aika.neuron.activation.SearchNode.Decision.SELECTED;
import static network.aika.neuron.activation.SearchNode.Decision.EXCLUDED;
import static network.aika.neuron.activation.SearchNode.Decision.UNKNOWN;
import java.util.*;
/**
* The {@code SearchNode} class represents a node in the binary search tree that is used to find the optimal
* interpretation for a given document. Each search node possess a refinement (simply a set of interpretation nodes).
* The two options that this search node examines are that the refinement will either part of the final interpretation or not.
* During each search step the activation values in all the neuron activations adjusted such that they reflect the interpretation of the current search path.
* When the search reaches the maximum depth of the search tree and no further refinements exists, a weight is computed evaluating the current search path.
* The search path with the highest weight is used to determine the final interpretation.
* <p>
* <p> Before the search is started a set of initial refinements is generated from the conflicts within the document.
* In other words, if there are no conflicts in a given document, then no search is needed. In this case the final interpretation
* will simply be the set of all interpretation nodes. The initial refinements are then expanded, meaning all interpretation nodes that are consistent
* with this refinement are added to the refinement. The initial refinements are then propagated along the search path as refinement candidates.
*
* @author Lukas Molzberger
*/
public class SearchNode implements Comparable<SearchNode> {

    private static final Logger log = LoggerFactory.getLogger(SearchNode.class);

    // Search tuning knobs (mutable on purpose, presumably set by tests/clients).
    public static int MAX_SEARCH_STEPS = Integer.MAX_VALUE;
    public static boolean ENABLE_CACHING = true;
    public static boolean OPTIMIZE_SEARCH = true;
    public static boolean COMPUTE_SOFT_MAX = false;

    public int id;

    // Parent pointers along the two possible branches; which one is the actual
    // parent is derived in getParent()/getDecision() from the newer id.
    SearchNode excludedParent;
    SearchNode selectedParent;

    long visited;
    public Candidate candidate;
    int level;

    DebugState debugState;

    public enum Decision {
        SELECTED('S'),
        EXCLUDED('E'),
        UNKNOWN('U');

        char s;

        Decision(char s) {
            this.s = s;
        }
    }

    public enum DebugState {
        CACHED,
        LIMITED,
        EXPLORE
    }

    // Weight contributed by this node alone, and the sum along the path from root.
    double weightDelta;
    public double accumulatedWeight = 0.0;

    // Activation states changed while evaluating this node, so they can be undone.
    public Map<Activation, Activation.StateChange> modifiedActs = new TreeMap<>(Activation.ACTIVATION_ID_COMP);

    private Step step = Step.INIT;
    private boolean alreadySelected;
    private boolean alreadyExcluded;
    private SearchNode selectedChild = null;
    private SearchNode excludedChild = null;
    private double selectedWeight = 0.0;
    private double excludedWeight = 0.0;
    private long processVisited;
    private boolean bestPath;

    // Avoids having to search the same path twice.
    private Decision skip = UNKNOWN;

    // States of the iterative depth-first search driven by search().
    private enum Step {
        INIT,
        PREPARE_SELECT,
        SELECT,
        POST_SELECT,
        PREPARE_EXCLUDE,
        EXCLUDE,
        POST_EXCLUDE,
        FINAL
    }

    /**
     * Creates a search node below the given parents and immediately evaluates its
     * weight delta, reusing the parent candidate's cached search node when the
     * cached decision matches and nothing relevant was marked dirty.
     */
    public SearchNode(Document doc, SearchNode selParent, SearchNode exclParent, int level) {
        id = doc.searchNodeIdCounter++;
        this.level = level;
        visited = doc.visitedCounter++;
        selectedParent = selParent;
        excludedParent = exclParent;

        Candidate c = getParent() != null ? getParent().candidate : null;
        SearchNode csn = null;
        boolean modified = true;
        if (c != null) {
            c.currentSearchNode = this;

            csn = c.cachedSearchNode;
            // Cache miss (no cached node or different decision): mark the candidate's
            // activation and its outputs dirty so they get re-evaluated.
            if (csn == null || csn.getDecision() != getDecision()) {
                Activation act = c.activation;
                act.markDirty(visited);
                for (Activation.Link l : act.neuronOutputs.values()) {
                    l.output.markDirty(visited);
                }
            } else {
                modified = csn.isModified();
                if (modified) {
                    c.debugComputed[2]++;
                }
            }
        }

        if(modified) {
            // Recompute from scratch and replace the cached node.
            weightDelta = doc.vQueue.process(this);
            markDirty();
            if(c != null) {
                c.cachedSearchNode = this;
            }
        } else {
            if(ENABLE_CACHING) {
                // Replay the cached state changes instead of recomputing.
                c.cachedSearchNode.changeState(Activation.Mode.NEW);
                weightDelta = c.cachedSearchNode.weightDelta;

                for(Activation act: c.cachedSearchNode.modifiedActs.keySet()) {
                    act.saveOldState(modifiedActs, doc.visitedCounter++);
                    act.saveNewState();
                }
            } else {
                // Caching disabled: recompute anyway and cross-check the cache
                // (pure consistency diagnostics, logged on mismatch).
                weightDelta = doc.vQueue.process(this);
                if (Math.abs(weightDelta - csn.weightDelta) > 0.00001 || !compareNewState(csn)) {
                    log.error("Cached search node activation do not match the newly computed results.");
                    log.info("Computed results:");
                    dumpDebugState();
                    log.info("Cached results:");
                    csn.dumpDebugState();
                }
            }
        }

        if (c != null) {
            c.debugComputed[modified ? 1 : 0]++;
        }

        if (getParent() != null) {
            accumulatedWeight = weightDelta + getParent().accumulatedWeight;
        }
    }

    /**
     * Returns true if any cached state change is stale: the activation was marked
     * dirty after this node's visit, its decision changed, or an active round has a
     * decided output that was marked dirty since.
     */
    private boolean isModified() {
        for (Activation.StateChange sc : modifiedActs.values()) {
            if (sc.getActivation().markedDirty > visited || sc.newState != sc.getActivation().decision) {
                return true;
            }
            if(sc.newRounds.isActive()) {
                for (Activation.Link l : sc.getActivation().neuronOutputs.values()) {
                    if (l.output.decision != UNKNOWN &&
                            l.output.markedDirty > visited) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Marks outputs of activations whose rounds differ between this node and the
     * parent candidate's cached node as dirty, invalidating their cached results.
     */
    private void markDirty() {
        if(getParent() == null || getParent().candidate == null) return;

        SearchNode csn = getParent().candidate.cachedSearchNode;

        Set<Activation> acts = new TreeSet<>(Activation.ACTIVATION_ID_COMP);
        acts.addAll(modifiedActs.keySet());
        if(csn != null) {
            acts.addAll(csn.modifiedActs.keySet());
        }

        acts.forEach(act -> {
            Activation.StateChange sca = modifiedActs.get(act);
            Activation.StateChange scb = csn != null ? csn.modifiedActs.get(act) : null;

            if (sca == null || scb == null || !sca.newRounds.compare(scb.newRounds)) {
                for (Activation.Link l : act.neuronOutputs.values()) {
                    l.output.markDirty(visited);
                }
            }
        });
    }

    /**
     * Compares the recorded state changes of this node with a cached node; used as
     * the diagnostic cross-check when ENABLE_CACHING is off.
     */
    public boolean compareNewState(SearchNode cachedNode) {
        if (modifiedActs == null && cachedNode.modifiedActs == null) return true;
        if (modifiedActs == null || cachedNode.modifiedActs == null) return false;

        if (modifiedActs.size() != cachedNode.modifiedActs.size()) {
            return false;
        }
        for (Map.Entry<Activation, Activation.StateChange> me: modifiedActs.entrySet()) {
            Activation.StateChange sca = me.getValue();
            Activation.StateChange scb = cachedNode.modifiedActs.get(me.getKey());

            if (!sca.newRounds.compare(scb.newRounds)) {
                return false;
            }
        }
        return true;
    }

    /** Logs the path from this node up to the root, one line per level. */
    public void dumpDebugState() {
        SearchNode n = this;
        String weights = "";
        Decision decision = UNKNOWN;
        while (n != null && n.level >= 0) {
            log.info(
                    n.level + " " +
                            n.debugState +
                            " DECISION:" + decision +
                            " " + (n.candidate != null ? n.candidate.toString() : "") +
                            " MOD-ACTS:" + n.modifiedActs.size() +
                            weights
            );

            decision = n.getDecision();
            weights = " AW:" + Utils.round(n.accumulatedWeight) +
                    " DW:" + Utils.round(n.weightDelta);

            n = n.getParent();
        }
    }

    /**
     * Searches for the best interpretation for the given document.
     *
     * This implementation of the algorithm is iterative to prevent stack overflow errors from happening.
     * Depending on the document the search tree might be getting very deep.
     *
     * @param doc
     * @param root
     */
    public static void search(Document doc, SearchNode root, long v, Long timeoutInMilliSeconds) throws TimeoutException {
        SearchNode sn = root;
        // Carries the weight "returned" by a finished child back to its parent,
        // emulating the return value of the recursive formulation.
        double returnWeight = 0.0;
        long startTime = System.currentTimeMillis();

        do {
            // A node revisited in a new search run restarts at INIT.
            if (sn.processVisited != v) {
                sn.step = Step.INIT;
                sn.processVisited = v;
            }

            switch(sn.step) {
                case INIT:
                    if (sn.level >= doc.candidates.size()) {
                        // Leaf reached: all candidates decided, evaluate this path.
                        if(timeoutInMilliSeconds != null && System.currentTimeMillis() > startTime + timeoutInMilliSeconds) {
                            throw new TimeoutException("Interpretation search took too long: " + (System.currentTimeMillis() - startTime) + "ms");
                        }

                        returnWeight = sn.processResult(doc);
                        sn.step = Step.FINAL;
                        sn = sn.getParent();
                    } else {
                        sn.initStep(doc);
                        sn.step = Step.PREPARE_SELECT;
                    }
                    break;
                case PREPARE_SELECT:
                    sn.step = sn.prepareSelectStep(doc) ? Step.SELECT : Step.PREPARE_EXCLUDE;
                    break;
                case SELECT:
                    sn.step = Step.POST_SELECT;
                    sn = sn.selectedChild; // descend into the SELECTED branch
                    break;
                case POST_SELECT:
                    sn.selectedWeight = returnWeight;
                    sn.postReturn(sn.selectedChild);
                    sn.step = Step.PREPARE_EXCLUDE;
                    break;
                case PREPARE_EXCLUDE:
                    sn.step = sn.prepareExcludeStep(doc) ? Step.EXCLUDE : Step.FINAL;
                    break;
                case EXCLUDE:
                    sn.step = Step.POST_EXCLUDE;
                    sn = sn.excludedChild; // descend into the EXCLUDED branch
                    break;
                case POST_EXCLUDE:
                    sn.excludedWeight = returnWeight;
                    sn.postReturn(sn.excludedChild);
                    sn.step = Step.FINAL;
                    break;
                case FINAL:
                    returnWeight = sn.finalStep();
                    SearchNode pn = sn.getParent();
                    if(pn != null) {
                        pn.skip = sn.getDecision();
                    }
                    sn = pn; // ascend
                    break;
                default:
            }
        } while(sn != null);
    }

    /**
     * Binds this node to the candidate of its level and determines whether one of
     * the two decisions is already forced (by preconditions, conflicts or an
     * externally set input decision). Also enforces MAX_SEARCH_STEPS.
     */
    private void initStep(Document doc) {
        candidate = doc.candidates.get(level);

        boolean precondition = checkPrecondition();

        alreadySelected = precondition && !candidate.isConflicting() || candidate.activation.inputDecision == SELECTED;
        alreadyExcluded = !precondition || checkExcluded(candidate.activation) || candidate.activation.inputDecision == EXCLUDED;

        if (doc.searchStepCounter > MAX_SEARCH_STEPS) {
            dumpDebugState();
            throw new RuntimeException("Max search step exceeded.");
        }

        doc.searchStepCounter++;

        storeDebugInfos();
    }

    // The cached decision is only trustworthy when the exclude branch is not forced.
    private Decision getCachedDecision() {
        return !alreadyExcluded ? candidate.cachedDecision : Decision.UNKNOWN;
    }

    /**
     * Sets up the SELECTED branch if it is neither forced off, already explored
     * (skip), nor pruned by the cached decision. Returns true when a child to
     * descend into was created.
     */
    private boolean prepareSelectStep(Document doc) {
        if(alreadyExcluded || skip == SELECTED || (OPTIMIZE_SEARCH && getCachedDecision() == Decision.EXCLUDED)) return false;

        candidate.activation.setDecision(SELECTED, visited);

        if (candidate.cachedDecision == UNKNOWN) {
            invalidateCachedDecisions();
        }

        selectedChild = new SearchNode(doc, this, excludedParent, level + 1);

        candidate.debugDecisionCounts[0]++;
        return true;
    }

    /**
     * Sets up the EXCLUDED branch under the symmetric conditions; additionally
     * skips it when all conflicting activations are already excluded (excluding
     * this one too could not be optimal then).
     */
    private boolean prepareExcludeStep(Document doc) {
        if(alreadySelected || skip == EXCLUDED || (OPTIMIZE_SEARCH && getCachedDecision() == Decision.SELECTED) || allOthersExcluded()) return false;

        candidate.activation.setDecision(EXCLUDED, visited);

        excludedChild = new SearchNode(doc, selectedParent, this, level + 1);

        candidate.debugDecisionCounts[1]++;
        return true;
    }

    private boolean allOthersExcluded() {
        for(Activation cAct: candidate.activation.getConflicts()) {
            if(cAct.decision != EXCLUDED) return false;
        }
        return true;
    }

    /** Undoes the child's state changes and resets this candidate's decision. */
    private void postReturn(SearchNode child) {
        child.changeState(Activation.Mode.OLD);

        candidate.activation.setDecision(UNKNOWN, visited);
        candidate.activation.rounds.reset();
    }

    /**
     * Picks the winning decision for this node (cached, forced, or by comparing
     * branch weights), caches it when allowed, propagates the best-path marker,
     * and frees the losing child subtree. Returns the winning branch weight.
     */
    private double finalStep() {
        Decision d;
        Decision cd = getCachedDecision();
        if(cd == UNKNOWN) {
            d = alreadySelected || (!alreadyExcluded && selectedWeight >= excludedWeight) ? SELECTED : EXCLUDED;

            if (!alreadyExcluded) {
                candidate.cachedDecision = d;
            }
        } else {
            d = cd;
        }

        SearchNode cn = d == SELECTED ? selectedChild : excludedChild;
        if(cn.bestPath) {
            candidate.bestChildNode = cn;
            bestPath = true;
        }

        // Release children that are not on the best path (free heap space).
        if(!bestPath || d != SELECTED) {
            selectedChild = null;
        }

        if(!bestPath || d != EXCLUDED) {
            excludedChild = null;
        }
        return d == SELECTED ? selectedWeight : excludedWeight;
    }

    private boolean checkPrecondition() {
        Set soin = candidate.activation.selectedNeuronInputs;
        return soin != null && !soin.isEmpty();
    }

    // Selecting this candidate may turn a previously EXCLUDED-cached dependent
    // or a SELECTED-cached conflict stale; drop those cache entries.
    private void invalidateCachedDecisions() {
        for (Activation.Link l : candidate.activation.neuronOutputs.values()) {
            if (!l.synapse.isNegative()) {
                invalidateCachedDecision(l.output);
            }
        }
    }

    public static void invalidateCachedDecision(Activation act) {
        Candidate pos = act.candidate;
        if (pos != null) {
            if (pos.cachedDecision == Decision.EXCLUDED) {
                pos.cachedDecision = UNKNOWN;
            }
        }

        for (Activation c : act.getConflicts()) {
            Candidate neg = c.candidate;
            if (neg != null) {
                if (neg.cachedDecision == Decision.SELECTED) {
                    neg.cachedDecision = UNKNOWN;
                }
            }
        }
    }

    /**
     * Called at a leaf: records this path as the document's best interpretation if
     * it is deeper or heavier than the current best, and optionally snapshots the
     * per-activation states for soft-max computation.
     * NOTE(review): assumes doc.selectedSearchNode is non-null on first comparison —
     * confirm it is initialized before search() runs.
     */
    private double processResult(Document doc) {
        double accNW = accumulatedWeight;

        if (level > doc.selectedSearchNode.level || accNW > getSelectedAccumulatedWeight(doc)) {
            doc.selectedSearchNode = this;
            storeFinalState(this);
            bestPath = true;
        } else {
            bestPath = false;
        }

        if(COMPUTE_SOFT_MAX) {
            storeSearchState(doc);
        }

        return accumulatedWeight;
    }

    /** Records this leaf's weight and the per-activation rounds along the path. */
    private void storeSearchState(Document doc) {
        doc.searchNodeWeights.put(id, accumulatedWeight);

        SearchNode sn = this;
        while(sn != null) {
            if(sn.candidate != null) {
                Activation act = sn.candidate.activation;

                if(act.searchStates == null) {
                    act.searchStates = new TreeMap<>();
                }
                act.searchStates.put(id, act.rounds.getLast());
            }

            sn = sn.getParent();
        }
    }

    // Freezes the current path's decisions/rounds as the final interpretation.
    private static void storeFinalState(SearchNode sn) {
        while(sn != null) {
            if(sn.candidate != null) {
                Activation act = sn.candidate.activation;
                act.finalRounds = act.rounds.copy();
                act.finalDecision = act.decision;
            }
            sn = sn.getParent();
        }
    }

    private double getSelectedAccumulatedWeight(Document doc) {
        return doc.selectedSearchNode != null ? doc.selectedSearchNode.accumulatedWeight : -1.0;
    }

    // True if any conflicting activation is already SELECTED.
    private boolean checkExcluded(Activation ref) {
        for (Activation cn : ref.getConflicts()) {
            if (cn.decision == SELECTED) return true;
        }
        return false;
    }

    public String pathToString() {
        return (selectedParent != null ? selectedParent.pathToString() : "") + " - " + toString();
    }

    public String toString() {
        return candidate.activation.id + " Decision:" + getDecision();
    }

    /** Applies or reverts all recorded activation state changes. */
    public void changeState(Activation.Mode m) {
        for (Activation.StateChange sc : modifiedActs.values()) {
            sc.restoreState(m);
        }
    }

    @Override
    public int compareTo(SearchNode sn) {
        return Integer.compare(id, sn.id);
    }

    /** The actual parent is whichever branch pointer was set more recently (higher id). */
    public SearchNode getParent() {
        return getDecision() == SELECTED ? selectedParent : excludedParent;
    }

    public Decision getDecision() {
        return excludedParent == null || (selectedParent != null && selectedParent.id > excludedParent.id) ? SELECTED : EXCLUDED;
    }

    // Classifies how this node was resolved, for the per-candidate statistics.
    private void storeDebugInfos() {
        if (alreadyExcluded || alreadySelected) {
            debugState = DebugState.LIMITED;
        } else if (getCachedDecision() != UNKNOWN) {
            debugState = DebugState.CACHED;
        } else {
            debugState = DebugState.EXPLORE;
        }
        candidate.debugCounts[debugState.ordinal()]++;
    }

    public static class TimeoutException extends RuntimeException {

        public TimeoutException(String message) {
            super(message);
        }
    }
}
| src/main/java/network/aika/neuron/activation/SearchNode.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package network.aika.neuron.activation;
import network.aika.Document;
import network.aika.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static network.aika.neuron.activation.SearchNode.Decision.SELECTED;
import static network.aika.neuron.activation.SearchNode.Decision.EXCLUDED;
import static network.aika.neuron.activation.SearchNode.Decision.UNKNOWN;
import java.util.*;
/**
* The {@code SearchNode} class represents a node in the binary search tree that is used to find the optimal
* interpretation for a given document. Each search node possess a refinement (simply a set of interpretation nodes).
* The two options that this search node examines are that the refinement will either part of the final interpretation or not.
* During each search step the activation values in all the neuron activations adjusted such that they reflect the interpretation of the current search path.
* When the search reaches the maximum depth of the search tree and no further refinements exists, a weight is computed evaluating the current search path.
* The search path with the highest weight is used to determine the final interpretation.
* <p>
* <p> Before the search is started a set of initial refinements is generated from the conflicts within the document.
* In other words, if there are no conflicts in a given document, then no search is needed. In this case the final interpretation
* will simply be the set of all interpretation nodes. The initial refinements are then expanded, meaning all interpretation nodes that are consistent
* with this refinement are added to the refinement. The initial refinements are then propagated along the search path as refinement candidates.
*
* @author Lukas Molzberger
*/
public class SearchNode implements Comparable<SearchNode> {

    private static final Logger log = LoggerFactory.getLogger(SearchNode.class);

    // Hard limit on search steps; exceeding it aborts the search with a RuntimeException (see initStep()).
    public static int MAX_SEARCH_STEPS = Integer.MAX_VALUE;
    // When true, results of an unmodified cached search node are restored instead of recomputed (see constructor).
    public static boolean ENABLE_CACHING = true;
    // When true, a candidate's cached decision prunes the opposite branch (see prepareSelectStep()/prepareExcludeStep()).
    public static boolean OPTIMIZE_SEARCH = true;
    // When true, processResult() additionally records per-search-node weights and activation states.
    public static boolean COMPUTE_SOFT_MAX = false;

    // Monotonically increasing id taken from Document.searchNodeIdCounter; also used by getDecision().
    public int id;

    // Parent on the "refinement excluded" branch; may be null at the root.
    SearchNode excludedParent;
    // Parent on the "refinement selected" branch; may be null at the root.
    SearchNode selectedParent;

    // Visited stamp taken from Document.visitedCounter at construction time; used for dirty marking.
    long visited;
    // Candidate whose SELECTED/EXCLUDED decision this node's children explore; null until initStep() runs.
    public Candidate candidate;
    // Depth of this node in the search tree == index into Document.candidates.
    int level;

    // How this node was handled (cached / limited / explored); for diagnostics only.
    DebugState debugState;

    /**
     * Decision taken for a candidate on the current search path.
     */
    public enum Decision {
        SELECTED('S'),
        EXCLUDED('E'),
        UNKNOWN('U');

        // Single-character label used for compact debug output.
        char s;

        Decision(char s) {
            this.s = s;
        }
    }

    /**
     * Diagnostic classification of a search step: result came from the cache,
     * only one branch was possible, or both branches had to be explored.
     */
    public enum DebugState {
        CACHED,
        LIMITED,
        EXPLORE
    }

    // Weight contribution of this single step; accumulatedWeight = parent's accumulatedWeight + weightDelta.
    double weightDelta;
    public double accumulatedWeight = 0.0;

    // Activation state changes caused by this step, keyed by activation; restored/undone via changeState().
    public Map<Activation, Activation.StateChange> modifiedActs = new TreeMap<>(Activation.ACTIVATION_ID_COMP);

    // Current position in the iterative state machine driven by search().
    private Step step = Step.INIT;
    // True if the candidate can only be SELECTED (not conflicting, or forced by inputDecision).
    private boolean alreadySelected;
    // True if the candidate can only be EXCLUDED (precondition failed, conflict selected, or forced).
    private boolean alreadyExcluded;
    private SearchNode selectedChild = null;
    private SearchNode excludedChild = null;
    private double selectedWeight = 0.0;
    private double excludedWeight = 0.0;
    // Visited stamp used by search() to detect re-entry into a node on the iterative walk.
    private long processVisited;
    // True if this node lies on the best-weighted path found so far.
    private boolean bestPath;

    // Avoids having to search the same path twice.
    private Decision skip = UNKNOWN;

    /**
     * States of the iterative depth-first search; see the switch in search().
     */
    private enum Step {
        INIT,
        PREPARE_SELECT,
        SELECT,
        POST_SELECT,
        PREPARE_EXCLUDE,
        EXCLUDE,
        POST_EXCLUDE,
        FINAL
    }

    /**
     * Creates a search node at the given depth and immediately computes (or restores
     * from the candidate's cached search node) the activation-value changes and the
     * resulting weight delta for this step.
     *
     * @param doc        document being interpreted; its id/visited counters are advanced
     * @param selParent  parent node when this node's decision is SELECTED (may be null)
     * @param exclParent parent node when this node's decision is EXCLUDED (may be null)
     * @param level      depth in the search tree
     */
    public SearchNode(Document doc, SearchNode selParent, SearchNode exclParent, int level) {
        id = doc.searchNodeIdCounter++;
        this.level = level;
        visited = doc.visitedCounter++;
        selectedParent = selParent;
        excludedParent = exclParent;

        // The candidate decided by this step belongs to the parent node.
        Candidate c = getParent() != null ? getParent().candidate : null;
        SearchNode csn = null;
        boolean modified = true;
        if (c != null) {
            c.currentSearchNode = this;

            csn = c.cachedSearchNode;
            if (csn == null || csn.getDecision() != getDecision()) {
                // No reusable cache entry: mark the candidate's activation and all its
                // outputs dirty so the value queue recomputes them.
                Activation act = c.activation;
                act.markDirty(visited);
                for (Activation.Link l : act.neuronOutputs.values()) {
                    l.output.markDirty(visited);
                }
            } else {
                // Same decision as the cached node: only recompute if something changed since.
                modified = csn.isModified();
                if (modified) {
                    c.debugComputed[2]++;
                }
            }
        }

        if(modified) {
            // Recompute activation values for this step and remember the result in the cache.
            weightDelta = doc.vQueue.process(this);
            markDirty();
            if(c != null) {
                c.cachedSearchNode = this;
            }
        } else {
            if(ENABLE_CACHING) {
                // Restore the cached step's activation states instead of recomputing.
                c.cachedSearchNode.changeState(Activation.Mode.NEW);
                weightDelta = c.cachedSearchNode.weightDelta;

                for(Activation act: c.cachedSearchNode.modifiedActs.keySet()) {
                    act.saveOldState(modifiedActs, doc.visitedCounter++);
                    act.saveNewState();
                }
            } else {
                // Caching disabled: recompute anyway and cross-check against the cached result.
                weightDelta = doc.vQueue.process(this);

                if (Math.abs(weightDelta - csn.weightDelta) > 0.00001 || !compareNewState(csn)) {
                    log.error("Cached search node activation do not match the newly computed results.");
                    log.info("Computed results:");
                    dumpDebugState();
                    log.info("Cached results:");
                    csn.dumpDebugState();
                }
            }
        }

        if (c != null) {
            // debugComputed[0] counts cache hits, [1] recomputations.
            c.debugComputed[modified ? 1 : 0]++;
        }

        if (getParent() != null) {
            accumulatedWeight = weightDelta + getParent().accumulatedWeight;
        }
    }

    /**
     * Returns true if any activation touched by this (cached) step was marked dirty
     * after this node's visited stamp, i.e. the cached result may be stale.
     */
    private boolean isModified() {
        for (Activation.StateChange sc : modifiedActs.values()) {
            if (sc.getActivation().markedDirty > visited || sc.newState != sc.getActivation().decision) {
                return true;
            }
            if(sc.newRounds.isActive()) {
                // Active rounds propagate: a dirty, already-decided output invalidates the cache too.
                for (Activation.Link l : sc.getActivation().neuronOutputs.values()) {
                    if (l.output.decision != UNKNOWN &&
                            l.output.markedDirty > visited) {
                        return true;
                    }
                }
            }
        }
        return false;
    }

    /**
     * Marks the outputs of every activation dirty whose rounds differ between this
     * step and the parent's cached step, so dependent activations get recomputed.
     */
    private void markDirty() {
        if(getParent() == null || getParent().candidate == null) return;

        SearchNode csn = getParent().candidate.cachedSearchNode;

        // Union of activations modified by either this step or the cached one.
        Set<Activation> acts = new TreeSet<>(Activation.ACTIVATION_ID_COMP);
        acts.addAll(modifiedActs.keySet());
        if(csn != null) {
            acts.addAll(csn.modifiedActs.keySet());
        }

        acts.forEach(act -> {
            Activation.StateChange sca = modifiedActs.get(act);
            Activation.StateChange scb = csn != null ? csn.modifiedActs.get(act) : null;

            if (sca == null || scb == null || !sca.newRounds.compare(scb.newRounds)) {
                for (Activation.Link l : act.neuronOutputs.values()) {
                    l.output.markDirty(visited);
                }
            }
        });
    }

    /**
     * Compares the activation states produced by this step with those of a cached
     * node; used as a consistency check when caching is disabled.
     *
     * @param cachedNode node whose modified activations are compared against
     * @return true if both steps produced equivalent state changes
     */
    public boolean compareNewState(SearchNode cachedNode) {
        if (modifiedActs == null && cachedNode.modifiedActs == null) return true;
        if (modifiedActs == null || cachedNode.modifiedActs == null) return false;

        if (modifiedActs.size() != cachedNode.modifiedActs.size()) {
            return false;
        }
        for (Map.Entry<Activation, Activation.StateChange> me: modifiedActs.entrySet()) {
            Activation.StateChange sca = me.getValue();
            Activation.StateChange scb = cachedNode.modifiedActs.get(me.getKey());

            if (!sca.newRounds.compare(scb.newRounds)) {
                return false;
            }
        }

        return true;
    }

    /**
     * Logs this node and all its ancestors (one line each) for debugging.
     * The decision and weights shown on a line belong to the step below it,
     * which is why they are carried over between loop iterations.
     */
    public void dumpDebugState() {
        SearchNode n = this;
        String weights = "";
        Decision decision = UNKNOWN;
        while (n != null && n.level >= 0) {
            log.info(
                    n.level + " " +
                            n.debugState +
                            " DECISION:" + decision +
                            " " + (n.candidate != null ? n.candidate.toString() : "") +
                            " MOD-ACTS:" + n.modifiedActs.size() +
                            weights
            );

            decision = n.getDecision();
            weights = " AW:" + Utils.round(n.accumulatedWeight) +
                    " DW:" + Utils.round(n.weightDelta);

            n = n.getParent();
        }
    }

    /**
     * Searches for the best interpretation for the given document.
     *
     * This implementation of the algorithm is iterative to prevent stack overflow errors from happening.
     * Depending on the document the search tree might be getting very deep.
     *
     * The per-node {@link Step} state machine emulates the call stack: descending
     * into a child sets {@code sn} to the child, returning restores the parent and
     * resumes at its recorded step.
     *
     * @param doc document whose candidates are searched
     * @param root root search node
     * @param v fresh visited stamp distinguishing this walk from earlier ones
     * @param timeoutInMilliSeconds optional wall-clock budget; null means unlimited
     * @throws TimeoutException if the budget is exceeded when a leaf is reached
     */
    public static void search(Document doc, SearchNode root, long v, Long timeoutInMilliSeconds) throws TimeoutException {
        SearchNode sn = root;
        // Weight returned by the most recently finished child, consumed by POST_SELECT/POST_EXCLUDE.
        double returnWeight = 0.0;
        long startTime = System.currentTimeMillis();

        do {
            if (sn.processVisited != v) {
                // First visit of this node on the current walk.
                sn.step = Step.INIT;
                sn.processVisited = v;
            }

            switch(sn.step) {
                case INIT:
                    if (sn.level >= doc.candidates.size()) {
                        // Leaf: all candidates decided; evaluate this path and return to parent.
                        if(timeoutInMilliSeconds != null && System.currentTimeMillis() > startTime + timeoutInMilliSeconds) {
                            throw new TimeoutException("Interpretation search took too long: " + (System.currentTimeMillis() - startTime) + "ms");
                        }

                        returnWeight = sn.processResult(doc);
                        sn.step = Step.FINAL;
                        sn = sn.getParent();
                    } else {
                        sn.initStep(doc);
                        sn.step = Step.PREPARE_SELECT;
                    }
                    break;
                case PREPARE_SELECT:
                    sn.step = sn.prepareSelectStep(doc) ? Step.SELECT : Step.PREPARE_EXCLUDE;
                    break;
                case SELECT:
                    sn.step = Step.POST_SELECT;
                    sn = sn.selectedChild;
                    break;
                case POST_SELECT:
                    sn.selectedWeight = returnWeight;

                    sn.postReturn(sn.selectedChild);
                    sn.step = Step.PREPARE_EXCLUDE;
                    break;
                case PREPARE_EXCLUDE:
                    sn.step = sn.prepareExcludeStep(doc) ? Step.EXCLUDE : Step.FINAL;
                    break;
                case EXCLUDE:
                    sn.step = Step.POST_EXCLUDE;
                    sn = sn.excludedChild;
                    break;
                case POST_EXCLUDE:
                    sn.excludedWeight = returnWeight;

                    sn.postReturn(sn.excludedChild);
                    sn.step = Step.FINAL;
                    break;
                case FINAL:
                    returnWeight = sn.finalStep();
                    SearchNode pn = sn.getParent();
                    if(pn != null) {
                        // Remember which branch this node represented so the parent
                        // does not explore the same decision again.
                        pn.skip = sn.getDecision();
                    }
                    sn = pn;

                    break;
                default:
            }
        } while(sn != null);
    }

    /**
     * Binds this node to the candidate at its level and determines whether one of
     * the two decisions is already forced; also enforces the search-step limit.
     */
    private void initStep(Document doc) {
        candidate = doc.candidates.get(level);

        boolean precondition = checkPrecondition();

        alreadySelected = precondition && !candidate.isConflicting() || candidate.activation.inputDecision == SELECTED;
        alreadyExcluded = !precondition || checkExcluded(candidate.activation) || candidate.activation.inputDecision == EXCLUDED;

        if (doc.searchStepCounter > MAX_SEARCH_STEPS) {
            dumpDebugState();
            throw new RuntimeException("Max search step exceeded.");
        }

        doc.searchStepCounter++;

        storeDebugInfos();
    }

    /**
     * Returns the candidate's cached decision, but only when the EXCLUDED branch is
     * still possible — otherwise the cached value must not prune anything.
     */
    private Decision getCachedDecision() {
        return !alreadyExcluded ? candidate.cachedDecision : Decision.UNKNOWN;
    }

    /**
     * Prepares descent into the SELECTED branch.
     *
     * @return true if the branch is created and should be entered; false if it is
     *         pruned (forced exclusion, already tried, or cached as EXCLUDED)
     */
    private boolean prepareSelectStep(Document doc) {
        if(alreadyExcluded || skip == SELECTED || (OPTIMIZE_SEARCH && getCachedDecision() == Decision.EXCLUDED)) return false;

        candidate.activation.setDecision(SELECTED, visited);

        if (candidate.cachedDecision == UNKNOWN) {
            invalidateCachedDecisions();
        }

        selectedChild = new SearchNode(doc, this, excludedParent, level + 1);

        candidate.debugDecisionCounts[0]++;

        return true;
    }

    /**
     * Prepares descent into the EXCLUDED branch.
     *
     * @return true if the branch is created and should be entered; false if it is
     *         pruned (forced selection, already tried, or cached as SELECTED)
     */
    private boolean prepareExcludeStep(Document doc) {
        if(alreadySelected || skip == EXCLUDED || (OPTIMIZE_SEARCH && getCachedDecision() == Decision.SELECTED)) return false;

        candidate.activation.setDecision(EXCLUDED, visited);

        excludedChild = new SearchNode(doc, selectedParent, this, level + 1);

        candidate.debugDecisionCounts[1]++;

        return true;
    }

    /**
     * Undoes the child's activation-state changes and resets the candidate's
     * decision after returning from a branch.
     */
    private void postReturn(SearchNode child) {
        child.changeState(Activation.Mode.OLD);

        candidate.activation.setDecision(UNKNOWN, visited);
        candidate.activation.rounds.reset();
    }

    /**
     * Picks the winning decision for this node (from the cache or by comparing the
     * two branch weights), records it in the candidate's cache when freshly
     * computed, propagates the best-path flag and drops the losing child.
     *
     * @return the weight of the winning branch, handed back to the parent
     */
    private double finalStep() {
        Decision d;
        Decision cd = getCachedDecision();
        if(cd == UNKNOWN) {
            d = alreadySelected || (!alreadyExcluded && selectedWeight >= excludedWeight) ? SELECTED : EXCLUDED;

            if (!alreadyExcluded) {
                candidate.cachedDecision = d;
            }
        } else {
            d = cd;
        }

        SearchNode cn = d == SELECTED ? selectedChild : excludedChild;
        if(cn.bestPath) {
            candidate.bestChildNode = cn;
            bestPath = true;
        }

        // Release children that are not on the best path so they can be garbage collected.
        if(!bestPath || d != SELECTED) {
            selectedChild = null;
        }
        if(!bestPath || d != EXCLUDED) {
            excludedChild = null;
        }

        return d == SELECTED ? selectedWeight : excludedWeight;
    }

    /**
     * Returns true if the candidate's activation has at least one selected neuron
     * input, i.e. selecting it could be meaningful at all.
     */
    private boolean checkPrecondition() {
        // NOTE(review): raw Set — presumably Set<Activation> or a synapse-keyed set; confirm against Activation.
        Set soin = candidate.activation.selectedNeuronInputs;
        return soin != null && !soin.isEmpty();
    }

    /**
     * Invalidates cached decisions of downstream activations (via non-negative
     * synapses) because this candidate's decision is being explored anew.
     */
    private void invalidateCachedDecisions() {
        for (Activation.Link l : candidate.activation.neuronOutputs.values()) {
            if (!l.synapse.isNegative()) {
                invalidateCachedDecision(l.output);
            }
        }
    }

    /**
     * Resets cached decisions that could be flipped by a change at the given
     * activation: its own EXCLUDED cache entry, and the SELECTED cache entries of
     * its conflicting activations.
     */
    public static void invalidateCachedDecision(Activation act) {
        Candidate pos = act.candidate;
        if (pos != null) {
            if (pos.cachedDecision == Decision.EXCLUDED) {
                pos.cachedDecision = UNKNOWN;
            }
        }

        for (Activation c : act.getConflicts()) {
            Candidate neg = c.candidate;
            if (neg != null) {
                if (neg.cachedDecision == Decision.SELECTED) {
                    neg.cachedDecision = UNKNOWN;
                }
            }
        }
    }

    /**
     * Evaluates a complete search path (called at a leaf). If this path beats the
     * best one found so far, its activation states become the document's final
     * state and the node is flagged as lying on the best path.
     *
     * NOTE(review): the first operand dereferences doc.selectedSearchNode.level
     * before the null-guarded getSelectedAccumulatedWeight(doc) runs — presumably
     * Document initialises selectedSearchNode before the first leaf is reached;
     * confirm in Document, otherwise this can throw a NullPointerException.
     *
     * @return the accumulated weight of this path
     */
    private double processResult(Document doc) {
        double accNW = accumulatedWeight;

        if (level > doc.selectedSearchNode.level || accNW > getSelectedAccumulatedWeight(doc)) {
            doc.selectedSearchNode = this;
            storeFinalState(this);
            bestPath = true;
        } else {
            bestPath = false;
        }

        if(COMPUTE_SOFT_MAX) {
            storeSearchState(doc);
        }

        return accumulatedWeight;
    }

    /**
     * Records this path's weight and each ancestor candidate's activation rounds
     * under this node's id; only used when COMPUTE_SOFT_MAX is enabled.
     */
    private void storeSearchState(Document doc) {
        doc.searchNodeWeights.put(id, accumulatedWeight);

        SearchNode sn = this;
        while(sn != null) {
            if(sn.candidate != null) {
                Activation act = sn.candidate.activation;
                if(act.searchStates == null) {
                    act.searchStates = new TreeMap<>();
                }
                act.searchStates.put(id, act.rounds.getLast());
            }

            sn = sn.getParent();
        }
    }

    /**
     * Snapshots the current rounds and decision of every candidate activation along
     * the path as the final (best-so-far) interpretation.
     */
    private static void storeFinalState(SearchNode sn) {
        while(sn != null) {
            if(sn.candidate != null) {
                Activation act = sn.candidate.activation;
                act.finalRounds = act.rounds.copy();
                act.finalDecision = act.decision;
            }
            sn = sn.getParent();
        }
    }

    // Weight of the best path found so far; -1.0 before any path has been evaluated.
    private double getSelectedAccumulatedWeight(Document doc) {
        return doc.selectedSearchNode != null ? doc.selectedSearchNode.accumulatedWeight : -1.0;
    }

    /**
     * Returns true if any activation conflicting with the given one is already
     * SELECTED on the current path, which forces this candidate to be excluded.
     */
    private boolean checkExcluded(Activation ref) {
        for (Activation cn : ref.getConflicts()) {
            if (cn.decision == SELECTED) return true;
        }
        return false;
    }

    /**
     * Renders the chain of selected ancestors down to this node; for debugging.
     */
    public String pathToString() {
        return (selectedParent != null ? selectedParent.pathToString() : "") + " - " + toString();
    }

    public String toString() {
        return candidate.activation.id + " Decision:" + getDecision();
    }

    /**
     * Applies (NEW) or reverts (OLD) every activation state change recorded by this step.
     */
    public void changeState(Activation.Mode m) {
        for (Activation.StateChange sc : modifiedActs.values()) {
            sc.restoreState(m);
        }
    }

    @Override
    public int compareTo(SearchNode sn) {
        return Integer.compare(id, sn.id);
    }

    /**
     * Returns the parent matching this node's decision: the selected parent when
     * the node sits on a SELECTED branch, the excluded parent otherwise.
     */
    public SearchNode getParent() {
        return getDecision() == SELECTED ? selectedParent : excludedParent;
    }

    /**
     * Derives this node's decision from which parent was created more recently:
     * ids increase monotonically, so the younger (higher-id) parent is the branch
     * this node was spawned on. A node with no excluded parent is SELECTED.
     */
    public Decision getDecision() {
        return excludedParent == null || (selectedParent != null && selectedParent.id > excludedParent.id) ? SELECTED : EXCLUDED;
    }

    /**
     * Classifies this step for the per-candidate debug counters.
     */
    private void storeDebugInfos() {
        if (alreadyExcluded || alreadySelected) {
            debugState = DebugState.LIMITED;
        } else if (getCachedDecision() != UNKNOWN) {
            debugState = DebugState.CACHED;
        } else {
            debugState = DebugState.EXPLORE;
        }
        candidate.debugCounts[debugState.ordinal()]++;
    }

    /**
     * Thrown by {@link #search} when the optional wall-clock budget is exceeded.
     * Unchecked so intermediate code does not have to declare it.
     */
    public static class TimeoutException extends RuntimeException {
        public TimeoutException(String message) {
            super(message);
        }
    }
}
| avoid the case that all options are excluded
| src/main/java/network/aika/neuron/activation/SearchNode.java | avoid the case that all options are excluded |
|
Java | apache-2.0 | 3409fbda588a5bee0e90df15f04fb29312d97fd7 | 0 | doctales/dita-ot,dita-ot/dita-ot,shaneataylor/dita-ot,drmacro/dita-ot,shaneataylor/dita-ot,Hasimir/dita-ot,infotexture/dita-ot,robander/dita-ot,drmacro/dita-ot,shaneataylor/dita-ot,infotexture/dita-ot,eerohele/dita-ot,queshaw/dita-ot,jelovirt/muuntaja,robander/dita-ot,shaneataylor/dita-ot,queshaw/dita-ot,robander/dita-ot,dita-ot/dita-ot,drmacro/dita-ot,dita-ot/dita-ot,eerohele/dita-ot,queshaw/dita-ot,infotexture/dita-ot,zanyants/dita-ot,dita-ot/dita-ot,shaneataylor/dita-ot,Hasimir/dita-ot,drmacro/dita-ot,robander/dita-ot,doctales/dita-ot,infotexture/dita-ot,drmacro/dita-ot,dita-ot/dita-ot,doctales/dita-ot,jelovirt/muuntaja,robander/dita-ot,infotexture/dita-ot,queshaw/dita-ot,eerohele/dita-ot,Hasimir/dita-ot,eerohele/dita-ot,zanyants/dita-ot,zanyants/dita-ot,jelovirt/muuntaja,doctales/dita-ot,zanyants/dita-ot,Hasimir/dita-ot | /*
* This file is part of the DITA Open Toolkit project hosted on
* Sourceforge.net. See the accompanying license.txt file for
* applicable licenses.
*/
/**
* (c) Copyright IBM Corp. 2011 All Rights Reserved.
*/
package org.dita.dost.util;
import static org.dita.dost.util.Constants.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedList;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
/**
* Definition of current job.
*
* <p>Instances are thread-safe.</p>
*
* @since 1.5.4
*/
public final class Job {

    /** Backing store for all list/map/property values of the current job. */
    private final Properties prop;
    /** Temporary directory the configuration files live in. */
    private final File tempDir;

    /**
     * Create new job configuration instance. Initialise by reading temporary configuration files.
     *
     * @param tempDir temporary directory
     * @throws IOException if reading configuration files failed
     * @throws IllegalStateException if configuration files are missing
     */
    public Job(final File tempDir) throws IOException {
        this.tempDir = tempDir;
        prop = new Properties();
        read();
    }

    /**
     * Wrap a caught exception into an {@link IOException} without using the
     * {@code IOException(String, Throwable)} constructor, which only exists since
     * Java 6 and therefore breaks the Java 1.5 compatibility this class targets.
     * {@link Throwable#initCause(Throwable)} has been available since Java 1.4,
     * so the cause chain is preserved while staying 1.5-compatible.
     *
     * @param message detail message for the new exception
     * @param cause exception to attach as the cause
     * @return new IOException carrying both message and cause
     */
    private static IOException wrap(final String message, final Throwable cause) {
        final IOException wrapped = new IOException(message);
        wrapped.initCause(cause);
        return wrapped;
    }

    /**
     * Read temporary configuration files. The XML file takes precedence over the
     * legacy plain properties file.
     *
     * @throws IOException if reading configuration files failed
     * @throws IllegalStateException if configuration files are missing
     */
    private void read() throws IOException {
        final File ditalist = new File(tempDir, FILE_NAME_DITA_LIST);
        final File xmlDitalist = new File(tempDir, FILE_NAME_DITA_LIST_XML);
        InputStream in = null;
        try {
            if (xmlDitalist.exists()) {
                in = new FileInputStream(xmlDitalist);
                prop.loadFromXML(in);
            } else if (ditalist.exists()) {
                in = new FileInputStream(ditalist);
                prop.load(in);
            } else {
                throw new IllegalStateException("Job configuration files not found");
            }
        } catch (final IOException e) {
            // Preserve the cause without the Java 6-only two-argument constructor.
            throw wrap("Failed to read file: " + e.getMessage(), e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (final IOException e) {
                    throw wrap("Failed to close file: " + e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Store job into temporary configuration files: both the plain properties file
     * and the XML file are written.
     *
     * @throws IOException if writing configuration files failed
     */
    public void write() throws IOException {
        FileOutputStream propertiesOutputStream = null;
        try {
            propertiesOutputStream = new FileOutputStream(new File(tempDir, FILE_NAME_DITA_LIST));
            prop.store(propertiesOutputStream, null);
            propertiesOutputStream.flush();
        } catch (final IOException e) {
            throw wrap("Failed to write file: " + e.getMessage(), e);
        } finally {
            if (propertiesOutputStream != null) {
                try {
                    propertiesOutputStream.close();
                } catch (final IOException e) {
                    throw wrap("Failed to close file: " + e.getMessage(), e);
                }
            }
        }
        FileOutputStream xmlOutputStream = null;
        try {
            xmlOutputStream = new FileOutputStream(new File(tempDir, FILE_NAME_DITA_LIST_XML));
            prop.storeToXML(xmlOutputStream, null);
            xmlOutputStream.flush();
        } catch (final IOException e) {
            // Was new IOException(msg, e): that constructor is Java 6+, defeating
            // the Java 1.5 compatibility fix this class received.
            throw wrap("Failed to write file: " + e.getMessage(), e);
        } finally {
            if (xmlOutputStream != null) {
                try {
                    xmlOutputStream.close();
                } catch (final IOException e) {
                    throw wrap("Failed to close file: " + e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Searches for the property with the specified key in this property list.
     *
     * @param key property key
     * @return the value in this property list with the specified key value, {@code null} if not found
     */
    public String getProperty(final String key) {
        return prop.getProperty(key);
    }

    /**
     * Set property value.
     *
     * @param key property key
     * @param value property value
     * @return the previous value of the specified key in this property list, or {@code null} if it did not have one
     */
    public String setProperty(final String key, final String value) {
        return (String) prop.setProperty(key, value);
    }

    /**
     * Return the copy-to map.
     * @return copy-to map
     */
    public Map<String, String> getCopytoMap() {
        return StringUtils.restoreMap(prop.getProperty(COPYTO_TARGET_TO_SOURCE_MAP_LIST, ""));
    }

    /**
     * @return the schemeSet
     */
    public Set<String> getSchemeSet() {
        return StringUtils.restoreSet(prop.getProperty(SUBJEC_SCHEME_LIST, ""));
    }

    /**
     * @return the inputMap
     */
    public String getInputMap() {
        return prop.getProperty(INPUT_DITAMAP);
    }

    /**
     * Get reference list: topic list, conref targets and copy-to sources combined.
     *
     * <p>TODO: rename to getReferenceList</p>
     *
     * @return reference list
     */
    public LinkedList<String> getCollection() {
        final LinkedList<String> refList = new LinkedList<String>();
        final String liststr = prop.getProperty(FULL_DITAMAP_TOPIC_LIST, "")
                + COMMA
                + prop.getProperty(CONREF_TARGET_LIST, "")
                + COMMA
                + prop.getProperty(COPYTO_SOURCE_LIST, "");
        final StringTokenizer tokenizer = new StringTokenizer(liststr, COMMA);
        while (tokenizer.hasMoreTokens()) {
            // addFirst reverses the token order; callers apparently rely on it (kept as-is).
            refList.addFirst(tokenizer.nextToken());
        }
        return refList;
    }

    /**
     * Get input directory.
     *
     * <p>TODO: rename to getInputDir</p>
     *
     * @return input directory
     */
    public String getValue() {
        return prop.getProperty("user.input.dir");
    }
}
| src/org/dita/dost/util/Job.java | /*
* This file is part of the DITA Open Toolkit project hosted on
* Sourceforge.net. See the accompanying license.txt file for
* applicable licenses.
*/
/**
* (c) Copyright IBM Corp. 2011 All Rights Reserved.
*/
package org.dita.dost.util;
import static org.dita.dost.util.Constants.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedList;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
/**
* Definition of current job.
*
* <p>Instances are thread-safe.</p>
*
* @since 1.5.4
*/
public final class Job {

    /** Property store holding the whole job configuration. */
    private final Properties prop;
    /** Directory containing the temporary configuration files. */
    private final File tempDir;

    /**
     * Create new job configuration instance. Initialise by reading temporary configuration files.
     *
     * @param tempDir temporary directory
     * @throws IOException if reading configuration files failed
     * @throws IllegalStateException if configuration files are missing
     */
    public Job(final File tempDir) throws IOException {
        this.tempDir = tempDir;
        prop = new Properties();
        read();
    }

    /**
     * Load the job configuration from disk, preferring the XML file over the
     * legacy plain properties file.
     *
     * @throws IOException if reading configuration files failed
     * @throws IllegalStateException if configuration files are missing
     */
    private void read() throws IOException {
        final File plainFile = new File(tempDir, FILE_NAME_DITA_LIST);
        final File xmlFile = new File(tempDir, FILE_NAME_DITA_LIST_XML);
        InputStream input = null;
        try {
            if (xmlFile.exists()) {
                input = new FileInputStream(xmlFile);
                prop.loadFromXML(input);
            } else if (plainFile.exists()) {
                input = new FileInputStream(plainFile);
                prop.load(input);
            } else {
                throw new IllegalStateException("Job configuration files not found");
            }
        } catch (final IOException e) {
            throw new IOException("Failed to read file: " + e.getMessage(), e);
        } finally {
            if (input != null) {
                try {
                    input.close();
                } catch (final IOException e) {
                    throw new IOException("Failed to close file: " + e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Persist the configuration, writing both the plain properties file and the
     * XML file into the temporary directory.
     *
     * @throws IOException if writing configuration files failed
     */
    public void write() throws IOException {
        FileOutputStream propsOut = null;
        try {
            propsOut = new FileOutputStream(new File(tempDir, FILE_NAME_DITA_LIST));
            prop.store(propsOut, null);
            propsOut.flush();
        } catch (final IOException e) {
            throw new IOException("Failed to write file: " + e.getMessage(), e);
        } finally {
            if (propsOut != null) {
                try {
                    propsOut.close();
                } catch (final IOException e) {
                    throw new IOException("Failed to close file: " + e.getMessage(), e);
                }
            }
        }
        FileOutputStream xmlOut = null;
        try {
            xmlOut = new FileOutputStream(new File(tempDir, FILE_NAME_DITA_LIST_XML));
            prop.storeToXML(xmlOut, null);
            xmlOut.flush();
        } catch (final IOException e) {
            throw new IOException("Failed to write file: " + e.getMessage(), e);
        } finally {
            if (xmlOut != null) {
                try {
                    xmlOut.close();
                } catch (final IOException e) {
                    throw new IOException("Failed to close file: " + e.getMessage(), e);
                }
            }
        }
    }

    /**
     * Look up a configuration value.
     *
     * @param key property key
     * @return the value in this property list with the specified key value, {@code null} if not found
     */
    public String getProperty(final String key) {
        return prop.getProperty(key);
    }

    /**
     * Store a configuration value.
     *
     * @param key property key
     * @param value property value
     * @return the previous value of the specified key in this property list, or {@code null} if it did not have one
     */
    public String setProperty(final String key, final String value) {
        return (String) prop.setProperty(key, value);
    }

    /**
     * Return the copy-to map.
     * @return copy-to map
     */
    public Map<String, String> getCopytoMap() {
        return StringUtils.restoreMap(prop.getProperty(COPYTO_TARGET_TO_SOURCE_MAP_LIST, ""));
    }

    /**
     * @return the schemeSet
     */
    public Set<String> getSchemeSet() {
        return StringUtils.restoreSet(prop.getProperty(SUBJEC_SCHEME_LIST, ""));
    }

    /**
     * @return the inputMap
     */
    public String getInputMap() {
        return prop.getProperty(INPUT_DITAMAP);
    }

    /**
     * Get reference list: topic list, conref targets and copy-to sources combined
     * into one list (in reverse token order, via addFirst).
     *
     * <p>TODO: rename to getReferenceList</p>
     *
     * @return reference list
     */
    public LinkedList<String> getCollection() {
        final LinkedList<String> refs = new LinkedList<String>();
        final String joined = prop.getProperty(FULL_DITAMAP_TOPIC_LIST, "")
                + COMMA
                + prop.getProperty(CONREF_TARGET_LIST, "")
                + COMMA
                + prop.getProperty(COPYTO_SOURCE_LIST, "");
        for (final StringTokenizer st = new StringTokenizer(joined, COMMA); st.hasMoreTokens();) {
            refs.addFirst(st.nextToken());
        }
        return refs;
    }

    /**
     * Get input directory.
     *
     * <p>TODO: rename to getInputDir</p>
     *
     * @return input directory
     */
    public String getValue() {
        return prop.getProperty("user.input.dir");
    }
}
| Fix Java 1.5 compatibility bug | src/org/dita/dost/util/Job.java | Fix Java 1.5 compatibility bug |
|
Java | apache-2.0 | 802c71bca703c4da57130510611944aa62516e53 | 0 | Distrotech/intellij-community,supersven/intellij-community,signed/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,allotria/intellij-community,clumsy/intellij-community,fnouama/intellij-community,da1z/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,jagguli/intellij-community,hurricup/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,fnouama/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,clumsy/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,caot/intellij-community,TangHao1987/intellij-community,consulo/consulo,robovm/robovm-studio,michaelgallacher/intellij-community,dslomov/intellij-community,hurricup/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,petteyg/intellij-community,vladmm/intellij-community,holmes/intellij-community,blademainer/intellij-community,slisson/intellij-community,jexp/idea2,jagguli/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,fitermay/intellij-comm
unity,izonder/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,idea4bsd/idea4bsd,ftomassetti/intellij-community,xfournet/intellij-community,diorcety/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,allotria/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,clumsy/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,vvv1559/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,holmes/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,holmes/intellij-community,fitermay/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,caot/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,fnouama/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,joewalnes/idea-community,dslomov/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,slisson/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,retomer
z/intellij-community,blademainer/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,da1z/intellij-community,kool79/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,fitermay/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,apixandru/intellij-community,xfournet/intellij-community,FHannes/intellij-community,petteyg/intellij-community,fnouama/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,caot/intellij-community,wreckJ/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,michaelgallacher/intellij-community,jexp/idea2,alphafoobar/intellij-community,suncycheng/intellij-community,kool79/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,ernestp/consulo,petteyg/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,diorce
ty/intellij-community,kdwink/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,ernestp/consulo,adedayo/intellij-community,holmes/intellij-community,youdonghai/intellij-community,supersven/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,consulo/consulo,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,asedunov/intellij-community,holmes/intellij-community,vladmm/intellij-community,fitermay/intellij-community,jagguli/intellij-community,asedunov/intellij-community,robovm/robovm-studio,idea4bsd/idea4bsd,FHannes/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,signed/intellij-community,amith01994/intellij-community,caot/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,robovm/robovm-studio,izonder/intellij-community,izonder/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,jagguli/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,ivan-fedorov/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,clumsy/intellij-community,samthor/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,ryano144/intellij-communit
y,lucafavatella/intellij-community,ernestp/consulo,ahb0327/intellij-community,consulo/consulo,robovm/robovm-studio,akosyakov/intellij-community,Lekanich/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,michaelgallacher/intellij-community,supersven/intellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,clumsy/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,holmes/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,samthor/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,kool79/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,holmes/intellij-community,kdwink/intellij-community,apixandru/intellij-community,amith01994/intellij-community,signed/intellij-community,ivan-fedorov/intel
lij-community,supersven/intellij-community,ol-loginov/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,semonte/intellij-community,signed/intellij-community,ahb0327/intellij-community,joewalnes/idea-community,wreckJ/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,jexp/idea2,supersven/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,dslomov/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,kdwink/intellij-community,apixandru/intellij-community,semonte/intellij-community,jexp/idea2,ftomassetti/intellij-community,kdwink/intellij-community,hurricup/intellij-community,ernestp/consulo,fnouama/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,tmpgit/intellij-community,supersven/intellij-community,allotria/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,samthor/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,petteyg/intellij-community,adedayo/intellij-community,vladmm/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community
,hurricup/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,slisson/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,hurricup/intellij-community,izonder/intellij-community,slisson/intellij-community,ryano144/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,blademainer/intellij-community,asedunov/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,supersven/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,caot/intellij-community,caot/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,da1z/intellij-community,nicolargo/intellij-community,caot/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,joewalnes/idea-community,ftomassetti/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,gnuhub/intellij-community,supersven/intellij-community,FHannes/intellij-community,orek
yuu/intellij-community,allotria/intellij-community,ol-loginov/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,consulo/consulo,amith01994/intellij-community,samthor/intellij-community,kdwink/intellij-community,ibinti/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,kdwink/intellij-community,asedunov/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,adedayo/intellij-community,semonte/intellij-community,diorcety/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,dslomov/intellij-community,adedayo/intellij-community,xfournet/intellij-community,hurricup/intellij-community,retomerz/intellij-community,samthor/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,signed/intellij-community,adedayo/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,petteyg/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,jexp/idea2,fnouama/intellij-community,izonder/intellij-community,ernestp/consulo,ahb0327/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,jagguli/intellij-community,signed/intellij-community,ol-logino
v/intellij-community,slisson/intellij-community,da1z/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,kool79/intellij-community,petteyg/intellij-community,FHannes/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,semonte/intellij-community,orekyuu/intellij-community,izonder/intellij-community,jagguli/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,semonte/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,ivan-fedorov/intellij-community,ibinti/intellij-community,supersven/intellij-community,fnouama/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,slisson/intellij-community,allotria/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,joewalnes/idea-community,asedunov/intellij-community,kool79/intellij-community,ryano144/intellij-community,ryano144/intellij-community,diorcety/intellij-community,caot/intellij-community,consulo/consulo,allotria/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,izonder/intellij-community,lucafavatella/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,joewalnes/idea-community,kool79/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,supersven/intellij-community,alphafoobar/
intellij-community,vladmm/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,signed/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,suncycheng/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,jagguli/intellij-community,ol-loginov/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,jexp/idea2,ryano144/intellij-community,clumsy/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,FHannes/intellij-community,apixandru/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,akosyakov/intellij-community,blademainer/intellij-community,supersven/intellij-community,jagguli/intellij-community,amith01994/intellij-community,robovm/robovm-studio,fnouama/intellij-community,allotria/intellij-community,Lekanich/intellij-community,holmes/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,joewalnes/idea-community,apixandru/intellij-community,jexp/idea2,vvv1559/intellij-community,nicolargo/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community
,caot/intellij-community,youdonghai/intellij-community,ernestp/consulo,ryano144/intellij-community,allotria/intellij-community,adedayo/intellij-community,clumsy/intellij-community,hurricup/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,signed/intellij-community,suncycheng/intellij-community,da1z/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,apixandru/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,FHannes/intellij-community,apixandru/intellij-community,retomerz/intellij-community,joewalnes/idea-community,kool79/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,jagguli/intellij-community,tmpgit/intellij-community,slisson/intellij-community,ahb0327/intellij-community,jexp/idea2,ibinti/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,izonder/intellij-community,fitermay/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,vladmm/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,ibinti/intellij-community,caot/intellij-community | /*
* Copyright 2000-2005 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.devkit.projectRoots;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.projectRoots.AdditionalDataConfigurable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkModificator;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.GuiUtils;
import com.intellij.ui.TextFieldWithStoredHistory;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.idea.devkit.DevKitBundle;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.io.IOException;
/**
* User: anna
* Date: Nov 22, 2004
*/
public class IdeaJdkConfigurable implements AdditionalDataConfigurable {
private JLabel mySandboxHomeLabel = new JLabel(DevKitBundle.message("sandbox.home.label"));
private TextFieldWithStoredHistory mySandboxHome = new TextFieldWithStoredHistory(SANDBOX_HISTORY);
private Sdk myIdeaJdk;
private boolean myModified;
@NonNls private static final String SANDBOX_HISTORY = "DEVKIT_SANDBOX_HISTORY";
public void setSdk(Sdk sdk) {
myIdeaJdk = sdk;
}
public JComponent createComponent() {
mySandboxHome.setHistorySize(5);
JPanel wholePanel = new JPanel(new GridBagLayout());
wholePanel.add(mySandboxHomeLabel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0.0, 1.0, GridBagConstraints.NORTHWEST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
wholePanel.add(GuiUtils.constructFieldWithBrowseButton(mySandboxHome,
new ActionListener() {
public void actionPerformed(ActionEvent e) {
FileChooserDescriptor descriptor = FileChooserDescriptorFactory
.createSingleFolderDescriptor();
descriptor.setTitle(DevKitBundle.message("sandbox.home"));
descriptor.setDescription(
DevKitBundle.message("sandbox.purpose"));
VirtualFile[] files = FileChooser.chooseFiles(mySandboxHome, descriptor);
if (files.length != 0) {
mySandboxHome.setText(
FileUtil.toSystemDependentName(files[0].getPath()));
}
myModified = true;
}
}),
new GridBagConstraints(1, GridBagConstraints.RELATIVE, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST,
GridBagConstraints.HORIZONTAL, new Insets(0, 66, 0, 0), 0, 0));
mySandboxHome.addDocumentListener(new DocumentAdapter() {
protected void textChanged(DocumentEvent e) {
myModified = true;
}
});
mySandboxHome.setText("");
myModified = true;
return wholePanel;
}
public boolean isModified() {
return myModified;
}
public void apply() throws ConfigurationException {
if (mySandboxHome.getText() == null || mySandboxHome.getText().length() == 0) {
throw new ConfigurationException(DevKitBundle.message("sandbox.specification"));
}
mySandboxHome.addCurrentTextToHistory();
Sandbox sandbox = new Sandbox(mySandboxHome.getText());
final SdkModificator modificator = myIdeaJdk.getSdkModificator();
modificator.setSdkAdditionalData(sandbox);
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
modificator.commitChanges();
}
});
myModified = false;
}
public void reset() {
mySandboxHome.reset();
if (myIdeaJdk != null && myIdeaJdk.getSdkAdditionalData() instanceof Sandbox) {
final String sandboxHome = ((Sandbox)myIdeaJdk.getSdkAdditionalData()).getSandboxHome();
mySandboxHome.setText(sandboxHome);
mySandboxHome.setSelectedItem(sandboxHome);
myModified = false;
} else {
@NonNls String defaultSandbox = "";
try {
defaultSandbox = new File(PathManager.getConfigPath()).getParentFile().getCanonicalPath() + File.separator + "sandbox";
}
catch (IOException e) {
//can't be on running instance
}
mySandboxHome.setText(defaultSandbox);
}
}
public void disposeUIResources() {
}
}
| plugins/devkit/src/projectRoots/IdeaJdkConfigurable.java | /*
* Copyright 2000-2005 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.devkit.projectRoots;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.projectRoots.AdditionalDataConfigurable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkModificator;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.GuiUtils;
import com.intellij.ui.TextFieldWithStoredHistory;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.idea.devkit.DevKitBundle;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
/**
* User: anna
* Date: Nov 22, 2004
*/
public class IdeaJdkConfigurable implements AdditionalDataConfigurable {
private JLabel mySandboxHomeLabel = new JLabel(DevKitBundle.message("sandbox.home.label"));
private TextFieldWithStoredHistory mySandboxHome = new TextFieldWithStoredHistory(SANDBOX_HISTORY);
private Sdk myIdeaJdk;
private boolean myModified;
@NonNls private static final String SANDBOX_HISTORY = "DEVKIT_SANDBOX_HISTORY";
public void setSdk(Sdk sdk) {
myIdeaJdk = sdk;
}
public JComponent createComponent() {
mySandboxHome.setHistorySize(5);
JPanel wholePanel = new JPanel(new GridBagLayout());
wholePanel.add(mySandboxHomeLabel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0.0, 1.0, GridBagConstraints.NORTHWEST,
GridBagConstraints.NONE, new Insets(0, 0, 0, 0), 0, 0));
wholePanel.add(GuiUtils.constructFieldWithBrowseButton(mySandboxHome,
new ActionListener() {
public void actionPerformed(ActionEvent e) {
FileChooserDescriptor descriptor = FileChooserDescriptorFactory
.createSingleFolderDescriptor();
descriptor.setTitle(DevKitBundle.message("sandbox.home"));
descriptor.setDescription(
DevKitBundle.message("sandbox.purpose"));
VirtualFile[] files = FileChooser.chooseFiles(mySandboxHome, descriptor);
if (files.length != 0) {
mySandboxHome.setText(
FileUtil.toSystemDependentName(files[0].getPath()));
}
myModified = true;
}
}),
new GridBagConstraints(1, GridBagConstraints.RELATIVE, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST,
GridBagConstraints.HORIZONTAL, new Insets(0, 66, 0, 0), 0, 0));
mySandboxHome.addDocumentListener(new DocumentAdapter() {
protected void textChanged(DocumentEvent e) {
myModified = true;
}
});
mySandboxHome.setText("");
myModified = true;
return wholePanel;
}
public boolean isModified() {
return myModified;
}
public void apply() throws ConfigurationException {
if (mySandboxHome.getText() == null || mySandboxHome.getText().length() == 0) {
throw new ConfigurationException(DevKitBundle.message("sandbox.specification"));
}
mySandboxHome.addCurrentTextToHistory();
Sandbox sandbox = new Sandbox(mySandboxHome.getText());
final SdkModificator modificator = myIdeaJdk.getSdkModificator();
modificator.setSdkAdditionalData(sandbox);
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
modificator.commitChanges();
}
});
myModified = false;
}
public void reset() {
mySandboxHome.reset();
if (myIdeaJdk != null && myIdeaJdk.getSdkAdditionalData() instanceof Sandbox) {
final String sandboxHome = ((Sandbox)myIdeaJdk.getSdkAdditionalData()).getSandboxHome();
mySandboxHome.setText(sandboxHome);
mySandboxHome.setSelectedItem(sandboxHome);
myModified = false;
} else {
mySandboxHome.setText("");
}
}
public void disposeUIResources() {
}
}
| http://www.jetbrains.net/jira/browse/IDEADEV-4023 create default sandbox directory | plugins/devkit/src/projectRoots/IdeaJdkConfigurable.java | http://www.jetbrains.net/jira/browse/IDEADEV-4023 create default sandbox directory |
|
Java | apache-2.0 | 9befe004304cf2fba6ef3ba3184f4a83bf834dec | 0 | benbenw/jmeter,etnetera/jmeter,ham1/jmeter,ham1/jmeter,etnetera/jmeter,ham1/jmeter,etnetera/jmeter,benbenw/jmeter,etnetera/jmeter,benbenw/jmeter,apache/jmeter,ham1/jmeter,apache/jmeter,ham1/jmeter,apache/jmeter,apache/jmeter,benbenw/jmeter,apache/jmeter,etnetera/jmeter | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.gui.util;
import java.awt.Font;
import java.awt.HeadlessException;
import java.io.IOException;
import java.util.Properties;
import org.apache.jmeter.gui.action.LookAndFeelCommand;
import org.apache.jmeter.util.JMeterUtils;
import org.fife.ui.rsyntaxtextarea.RSyntaxTextArea;
import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
import org.fife.ui.rsyntaxtextarea.Theme;
import org.fife.ui.rtextarea.RUndoManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility class to handle RSyntaxTextArea code
* It's not currently possible to instantiate the RSyntaxTextArea class when running headless.
* So we use getInstance methods to create the class and allow for headless testing.
*/
public class JSyntaxTextArea extends RSyntaxTextArea {
private static final long serialVersionUID = 211L;
private static final Logger log = LoggerFactory.getLogger(JSyntaxTextArea.class);
private static final Theme DARCULA_THEME = initTheme();
private final Properties languageProperties = JMeterUtils.loadProperties("org/apache/jmeter/gui/util/textarea.properties"); //$NON-NLS-1$
private final boolean disableUndo;
private static final boolean WRAP_STYLE_WORD = JMeterUtils.getPropDefault("jsyntaxtextarea.wrapstyleword", true);
private static final boolean LINE_WRAP = JMeterUtils.getPropDefault("jsyntaxtextarea.linewrap", true);
private static final boolean CODE_FOLDING = JMeterUtils.getPropDefault("jsyntaxtextarea.codefolding", true);
private static final int MAX_UNDOS = JMeterUtils.getPropDefault("jsyntaxtextarea.maxundos", 50);
private static final String USER_FONT_FAMILY = JMeterUtils.getPropDefault("jsyntaxtextarea.font.family", null);
private static final int USER_FONT_SIZE = JMeterUtils.getPropDefault("jsyntaxtextarea.font.size", -1);
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @param disableUndo
* true to disable undo manager
* @return {@link JSyntaxTextArea}
*/
public static JSyntaxTextArea getInstance(int rows, int cols, boolean disableUndo) {
try {
JSyntaxTextArea jSyntaxTextArea = new JSyntaxTextArea(rows, cols, disableUndo);
applyTheme(jSyntaxTextArea);
return jSyntaxTextArea;
} catch (HeadlessException e) {
// Allow override for unit testing only
if ("true".equals(System.getProperty("java.awt.headless"))) { // $NON-NLS-1$ $NON-NLS-2$
return new JSyntaxTextArea(disableUndo) {
private static final long serialVersionUID = 1L;
@Override
protected void init() {
try {
super.init();
} catch (HeadlessException|NullPointerException e) {
// ignored
}
}
// Override methods that would fail
@Override
public void setCodeFoldingEnabled(boolean b) { }
@Override
public void setCaretPosition(int b) { }
@Override
public void discardAllEdits() { }
@Override
public boolean isCodeFoldingEnabled(){ return true; }
};
} else {
throw e;
}
}
}
/**
* Apply XML Theme to syntax text area
* @param jSyntaxTextArea
*/
private static void applyTheme(JSyntaxTextArea jSyntaxTextArea) {
String laf = LookAndFeelCommand.getJMeterLaf();
if(JMeterMenuBar.DARCULA_LAF_CLASS.equals(laf)) {
DARCULA_THEME.apply(jSyntaxTextArea);
}
}
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @return {@link JSyntaxTextArea}
*/
public static JSyntaxTextArea getInstance(int rows, int cols) {
return getInstance(rows, cols, false);
}
@Deprecated
public JSyntaxTextArea() {
// For use by test code only
this(30, 50, false);
}
// for use by headless tests only
private JSyntaxTextArea(boolean dummy) {
disableUndo = dummy;
}
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @deprecated use {@link #getInstance(int, int)} instead
*/
@Deprecated
public JSyntaxTextArea(int rows, int cols) {
this(rows, cols, false);
}
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @param disableUndo
* true to disable undo manager, defaults to false
* @deprecated use {@link #getInstance(int, int, boolean)} instead
*/
@Deprecated
public JSyntaxTextArea(int rows, int cols, boolean disableUndo) {
super(rows, cols);
super.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA);
super.setCodeFoldingEnabled(CODE_FOLDING);
super.setAntiAliasingEnabled(true);
super.setLineWrap(LINE_WRAP);
super.setWrapStyleWord(WRAP_STYLE_WORD);
this.disableUndo = disableUndo;
if (USER_FONT_FAMILY != null) {
int fontSize = USER_FONT_SIZE > 0 ? USER_FONT_SIZE : getFont().getSize();
setFont(new Font(USER_FONT_FAMILY, Font.PLAIN, fontSize));
if (log.isDebugEnabled()) {
log.debug("Font is set to: {}", getFont());
}
}
if(disableUndo) {
// We need to do this to force recreation of undoManager which
// will use the disableUndo otherwise it would always be false
// See BUG 57440
discardAllEdits();
}
}
/**
* Sets the language of the text area.
*
* @param language
* The language to be set
*/
public void setLanguage(String language) {
if(language == null) {
// TODO: Log a message?
// But how to find the name of the offending GUI element in the case of a TestBean?
super.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_NONE);
} else {
final String style = languageProperties.getProperty(language);
if (style == null) {
super.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_NONE);
} else {
super.setSyntaxEditingStyle(style);
}
}
}
/**
* Override UndoManager to allow disabling if feature causes issues
* See <a href="https://github.com/bobbylight/RSyntaxTextArea/issues/19">Issue 19 on RSyntaxTextArea</a>
*/
@Override
protected RUndoManager createUndoManager() {
RUndoManager undoManager = super.createUndoManager();
if(disableUndo) {
undoManager.setLimit(0);
} else {
undoManager.setLimit(MAX_UNDOS);
}
return undoManager;
}
/**
* Sets initial text resetting undo history
*
* @param string
* The initial text to be set
*/
public void setInitialText(String string) {
setText(string);
discardAllEdits();
}
private static final Theme initTheme() {
try {
return Theme.load(JSyntaxTextArea.class.getClassLoader().getResourceAsStream(
"org/apache/jmeter/gui/util/theme/darcula_theme.xml"));
} catch (IOException e) {
log.error("Error reading darcula_theme for JSyntaxTextArea", e);
return null;
}
}
}
| src/core/org/apache/jmeter/gui/util/JSyntaxTextArea.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.gui.util;
import java.awt.Font;
import java.awt.HeadlessException;
import java.io.IOException;
import java.util.Properties;
import org.apache.jmeter.gui.action.LookAndFeelCommand;
import org.apache.jmeter.util.JMeterUtils;
import org.fife.ui.rsyntaxtextarea.RSyntaxTextArea;
import org.fife.ui.rsyntaxtextarea.SyntaxConstants;
import org.fife.ui.rsyntaxtextarea.Theme;
import org.fife.ui.rtextarea.RUndoManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Utility class to handle RSyntaxTextArea code
* It's not currently possible to instantiate the RSyntaxTextArea class when running headless.
* So we use getInstance methods to create the class and allow for headless testing.
*/
public class JSyntaxTextArea extends RSyntaxTextArea {
private static final long serialVersionUID = 211L;
private static final Logger log = LoggerFactory.getLogger(JSyntaxTextArea.class);
private static final Theme DARCULA_THEME = initTheme();
private final Properties languageProperties = JMeterUtils.loadProperties("org/apache/jmeter/gui/util/textarea.properties"); //$NON-NLS-1$
private final boolean disableUndo;
private static final boolean WRAP_STYLE_WORD = JMeterUtils.getPropDefault("jsyntaxtextarea.wrapstyleword", true);
private static final boolean LINE_WRAP = JMeterUtils.getPropDefault("jsyntaxtextarea.linewrap", true);
private static final boolean CODE_FOLDING = JMeterUtils.getPropDefault("jsyntaxtextarea.codefolding", true);
private static final int MAX_UNDOS = JMeterUtils.getPropDefault("jsyntaxtextarea.maxundos", 50);
private static final String USER_FONT_FAMILY = JMeterUtils.getPropDefault("jsyntaxtextarea.font.family", null);
private static final int USER_FONT_SIZE = JMeterUtils.getPropDefault("jsyntaxtextarea.font.size", -1);
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @param disableUndo
* true to disable undo manager
* @return {@link JSyntaxTextArea}
*/
public static JSyntaxTextArea getInstance(int rows, int cols, boolean disableUndo) {
try {
JSyntaxTextArea jSyntaxTextArea = new JSyntaxTextArea(rows, cols, disableUndo);
applyTheme(jSyntaxTextArea);
return jSyntaxTextArea;
} catch (HeadlessException e) {
// Allow override for unit testing only
if ("true".equals(System.getProperty("java.awt.headless"))) { // $NON-NLS-1$ $NON-NLS-2$
return new JSyntaxTextArea(disableUndo) {
private static final long serialVersionUID = 1L;
private String text;
@Override
protected void init() {
try {
super.init();
} catch (HeadlessException|NullPointerException e) {
// ignored
}
}
// Override methods that would fail
@Override
public void setCodeFoldingEnabled(boolean b) { }
@Override
public void setCaretPosition(int b) { }
@Override
public void discardAllEdits() { }
@Override
public void setText(String t) {
this.text = t;
}
@Override
public String getText() {
return text;
}
@Override
public boolean isCodeFoldingEnabled(){ return true; }
};
} else {
throw e;
}
}
}
/**
* Apply XML Theme to syntax text area
* @param jSyntaxTextArea
*/
private static void applyTheme(JSyntaxTextArea jSyntaxTextArea) {
String laf = LookAndFeelCommand.getJMeterLaf();
if(JMeterMenuBar.DARCULA_LAF_CLASS.equals(laf)) {
DARCULA_THEME.apply(jSyntaxTextArea);
}
}
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @return {@link JSyntaxTextArea}
*/
public static JSyntaxTextArea getInstance(int rows, int cols) {
return getInstance(rows, cols, false);
}
@Deprecated
public JSyntaxTextArea() {
// For use by test code only
this(30, 50, false);
}
// for use by headless tests only
private JSyntaxTextArea(boolean dummy) {
disableUndo = dummy;
}
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @deprecated use {@link #getInstance(int, int)} instead
*/
@Deprecated
public JSyntaxTextArea(int rows, int cols) {
this(rows, cols, false);
}
/**
* Creates the default syntax highlighting text area. The following are set:
* <ul>
* <li>setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA)</li>
* <li>setCodeFoldingEnabled(true)</li>
* <li>setAntiAliasingEnabled(true)</li>
* <li>setLineWrap(true)</li>
* <li>setWrapStyleWord(true)</li>
* </ul>
*
* @param rows
* The number of rows for the text area
* @param cols
* The number of columns for the text area
* @param disableUndo
* true to disable undo manager, defaults to false
* @deprecated use {@link #getInstance(int, int, boolean)} instead
*/
@Deprecated
public JSyntaxTextArea(int rows, int cols, boolean disableUndo) {
super(rows, cols);
super.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_JAVA);
super.setCodeFoldingEnabled(CODE_FOLDING);
super.setAntiAliasingEnabled(true);
super.setLineWrap(LINE_WRAP);
super.setWrapStyleWord(WRAP_STYLE_WORD);
this.disableUndo = disableUndo;
if (USER_FONT_FAMILY != null) {
int fontSize = USER_FONT_SIZE > 0 ? USER_FONT_SIZE : getFont().getSize();
setFont(new Font(USER_FONT_FAMILY, Font.PLAIN, fontSize));
if (log.isDebugEnabled()) {
log.debug("Font is set to: {}", getFont());
}
}
if(disableUndo) {
// We need to do this to force recreation of undoManager which
// will use the disableUndo otherwise it would always be false
// See BUG 57440
discardAllEdits();
}
}
/**
* Sets the language of the text area.
*
* @param language
* The language to be set
*/
public void setLanguage(String language) {
if(language == null) {
// TODO: Log a message?
// But how to find the name of the offending GUI element in the case of a TestBean?
super.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_NONE);
} else {
final String style = languageProperties.getProperty(language);
if (style == null) {
super.setSyntaxEditingStyle(SyntaxConstants.SYNTAX_STYLE_NONE);
} else {
super.setSyntaxEditingStyle(style);
}
}
}
/**
* Override UndoManager to allow disabling if feature causes issues
* See <a href="https://github.com/bobbylight/RSyntaxTextArea/issues/19">Issue 19 on RSyntaxTextArea</a>
*/
@Override
protected RUndoManager createUndoManager() {
RUndoManager undoManager = super.createUndoManager();
if(disableUndo) {
undoManager.setLimit(0);
} else {
undoManager.setLimit(MAX_UNDOS);
}
return undoManager;
}
/**
* Sets initial text resetting undo history
*
* @param string
* The initial text to be set
*/
public void setInitialText(String string) {
setText(string);
discardAllEdits();
}
private static final Theme initTheme() {
try {
return Theme.load(JSyntaxTextArea.class.getClassLoader().getResourceAsStream(
"org/apache/jmeter/gui/util/theme/darcula_theme.xml"));
} catch (IOException e) {
log.error("Error reading darcula_theme for JSyntaxTextArea", e);
return null;
}
}
}
| Revert commit r1817421
git-svn-id: https://svn.apache.org/repos/asf/jmeter/trunk@1817433 13f79535-47bb-0310-9956-ffa450edef68
Former-commit-id: c7d92298db572c7c1c5d0ce0d4715c4cd24cf4f3 | src/core/org/apache/jmeter/gui/util/JSyntaxTextArea.java | Revert commit r1817421 |
|
Java | apache-2.0 | 66c672ff26fb5345bffec11f37bc19e29ab6856d | 0 | Wisser/Jailer,Wisser/Jailer,Wisser/Jailer | /*
* Copyright 2007 - 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.jailer.ui;
import java.awt.Color;
import java.awt.Component;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.swing.DefaultCellEditor;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.RowSorter.SortKey;
import javax.swing.SortOrder;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableColumn;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import net.sf.jailer.datamodel.Association;
import net.sf.jailer.datamodel.DataModel;
import net.sf.jailer.datamodel.Table;
/**
* Pending Decisions Panel.
*
* @author Ralf Wisser
*/
@SuppressWarnings("serial")
public abstract class PendingDecisionsPanel extends javax.swing.JPanel {
private final DataModel dataModel;
private final ExtractionModelEditor extractionModelEditor;
private List<Association> atBorder = new ArrayList<Association>();
private Set<Association> checked = new HashSet<Association>();
/**
* Creates new form PendingDecisionsPanel
*/
public PendingDecisionsPanel(DataModel dataModel, ExtractionModelEditor extractionModelEditor) {
this.dataModel = dataModel;
this.extractionModelEditor = extractionModelEditor;
initComponents();
DefaultTableCellRenderer associationsListItemRenderer = new DefaultTableCellRenderer() {
final Color BG_SELCOLOR = new Color(0.45f, 0.85f, 1.0f);
final Color BG1 = new Color(255, 255, 255);
final Color BG2 = new Color(240, 255, 255);
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
boolean hasFocus, int row, int column) {
if (value == null) {
value = "";
}
if (column == 4) {
isSelected = false;
}
Component render = super.getTableCellRendererComponent(table, value, isSelected, false, row, column);
if (value instanceof Boolean) {
JCheckBox checkBox = new JCheckBox(" ");
checkBox.setHorizontalAlignment(SwingConstants.CENTER);
checkBox.setSelected(Boolean.TRUE.equals(value));
checkBox.setOpaque(true);
render = checkBox;
}
render.setBackground(isSelected ? BG_SELCOLOR : (row % 2 == 0) ? BG1 : BG2);
render.setForeground(Color.BLACK);
if (render instanceof JLabel) {
((JLabel) render).setToolTipText(UIUtil.toHTML(String.valueOf(value), 100));
}
return render;
}
private static final long serialVersionUID = -6057505075587930064L;
};
associationsTable.setModel(createAssociationsListModel());
associationsTable.setDefaultRenderer(Object.class, associationsListItemRenderer);
associationsTable.setDefaultRenderer(Boolean.class, associationsListItemRenderer);
associationsTable.getColumnModel().getColumn(4).setCellRenderer(associationsListItemRenderer);
associationsTable.setAutoCreateRowSorter(true);
associationsTable.setShowVerticalLines(false);
associationsTable.setShowHorizontalLines(false);
associationsTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
associationsTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
if (associationsTable.getSelectedRow() >= 0) {
if (!ignoreSelection) {
select(atBorder.get(associationsTable.getRowSorter().convertRowIndexToModel(associationsTable.getSelectedRow())));
}
}
}
});
initRowSorter(associationsTable);
updateView();
}
private boolean activated = false;
private boolean updatePending = false;
private boolean ignoreSelection = false;
void updateView() {
if (updatePending) {
return;
}
updatePending = true;
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
try {
Association selected = null;
if (associationsTable.getSelectedRow() >= 0) {
selected = atBorder.get(associationsTable.getRowSorter().convertRowIndexToModel(associationsTable.getSelectedRow()));
}
for (Iterator<Association> i = atBorder.iterator(); i.hasNext(); ) {
if (!checked.contains(i.next())) {
i.remove();
}
}
Set<Association> atBorderSet = new HashSet<Association>(atBorder);
Set<Table> closure = extractionModelEditor.getCurrentSubjectClosure();
for (String a : dataModel.decisionPending) {
Association association = dataModel.namedAssociations.get(a);
if (association != null) {
if (closure.contains(association.source) || closure.contains(association.destination)) {
if (!atBorderSet.contains(association.reversalAssociation)) {
atBorder.add(association.reversalAssociation);
atBorderSet.add(association.reversalAssociation);
}
}
}
}
Collections.sort(atBorder, new Comparator<Association>() {
@Override
public int compare(Association o1, Association o2) {
int c = dataModel.getDisplayName(o1.source).compareTo(dataModel.getDisplayName(o2.source));
if (c != 0) {
return c;
}
c = dataModel.getDisplayName(o1.destination).compareTo(dataModel.getDisplayName(o2.destination));
if (c != 0) {
return c;
}
return o1.getName().compareTo(o2.getName());
}
});
List<? extends SortKey> sortKeys = new ArrayList<>(associationsTable.getRowSorter().getSortKeys());
associationsTable.setModel(createAssociationsListModel());
try {
associationsTable.getRowSorter().setSortKeys(sortKeys);
} catch (Throwable t) {
// ignore
}
selectAssociation(selected);
adjustTableColumnsWidth(associationsTable);
boolean hasChecked = false;
for (Association association: atBorder) {
if (checked.contains(association)) {
hasChecked = true;
break;
}
}
checkAllButton.setEnabled(!checked.containsAll(atBorder));
uncheckAllButton.setEnabled(hasChecked);
clearButton.setEnabled(hasChecked);
if (checked.containsAll(atBorder)) {
if (activated) {
deactivate();
activated = false;
}
} else {
if (!activated) {
activate();
activated = true;
}
}
} finally {
updatePending = false;
}
}
});
}
private void initRowSorter(JTable table) {
TableRowSorter<TableModel> sorter = new TableRowSorter<TableModel>(table.getModel()) {
@Override
protected boolean useToString(int column) {
return false;
}
@Override
public void toggleSortOrder(int column) {
List<? extends SortKey> sortKeys = getSortKeys();
if (sortKeys.size() > 0) {
if (sortKeys.get(0).getSortOrder() == SortOrder.DESCENDING) {
setSortKeys(null);
return;
}
}
super.toggleSortOrder(column);
}
};
table.setRowSorter(sorter);
}
public void adjustTableColumnsWidth(JTable table) {
DefaultTableModel dtm = (DefaultTableModel) table.getModel();
DefaultTableCellRenderer defaultTableCellRenderer = new DefaultTableCellRenderer();
for (int i = 0; i < table.getColumnCount(); i++) {
TableColumn column = table.getColumnModel().getColumn(i);
Component comp = defaultTableCellRenderer.getTableCellRendererComponent(table, column.getHeaderValue(),
false, false, 0, i);
int width = 1;
width = Math.max(width, comp.getPreferredSize().width);
int line = 0;
for (; line < table.getRowCount(); ++line) {
comp = table.getCellRenderer(line, i).getTableCellRendererComponent(table, dtm.getValueAt(line, i),
false, false, line, i);
width = Math.max(width, comp.getPreferredSize().width);
}
if (i >= 2) {
width = Math.min(width, 200);
}
column.setPreferredWidth(width);
}
table.getColumnModel().getColumn(table.getColumnModel().getColumnCount() - 1).setPreferredWidth(120);
}
/**
* Creates model for associations-list component.
*
* @return model for associations-list component
*/
private TableModel createAssociationsListModel() {
DefaultTableModel associationsTableModel = new DefaultTableModel(
new Object[] { "From", "To", "Condition", "Name", "Checked" }, 0) {
@Override
public boolean isCellEditable(int row, int column) {
return column == 4;
}
@Override
public Class<?> getColumnClass(int columnIndex) {
if(columnIndex == 4) {
return Boolean.class;
}
return super.getColumnClass(columnIndex);
}
@Override
public void setValueAt(Object aValue, int row, int column) {
super.setValueAt(aValue, row, column);
final Association association = atBorder.get(row);
if (Boolean.TRUE.equals(aValue)) {
decisionMade(association);
} else {
decisionUndone(association);
}
}
};
final JCheckBox checkBox = new JCheckBox(" ");
checkBox.setOpaque(true);
checkBox.setHorizontalAlignment(SwingConstants.CENTER);
DefaultCellEditor anEditor = new DefaultCellEditor(checkBox);
anEditor.setClickCountToStart(1);
associationsTable.setDefaultEditor(Boolean.class, anEditor);
for (Association a: atBorder) {
String name = a.reversalAssociation.getName();
associationsTableModel.addRow(
new Object[] {
dataModel.getDisplayName(a.source),
dataModel.getDisplayName(a.destination),
a.getUnrestrictedJoinCondition(),
name,
checked.contains(a) });
}
return associationsTableModel;
}
private void select(Association association) {
if (!extractionModelEditor.select(association)) {
extractionModelEditor.setRootSelection(association.destination);
extractionModelEditor.select(association);
}
}
/**
* Removes an association from "pendingDecisions" list.
*
* @param association the association
*/
public void decisionMade(Association association) {
checked.add(association);
dataModel.decisionPending.remove(association.reversalAssociation.getName());
decisionMade = true;
markDirty();
updateView();
}
/**
* Adds an association to "pendingDecisions" list.
*
* @param association the association
*/
public void decisionUndone(Association association) {
dataModel.decisionPending.add(association.reversalAssociation.getName());
checked.remove(association);
markDirty();
updateView();
}
protected boolean decisionMade = false;
protected abstract void toggleDockingState();
protected abstract void activate();
protected abstract void deactivate();
protected abstract void markDirty();
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
java.awt.GridBagConstraints gridBagConstraints;
jPanel1 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
associationsTable = new javax.swing.JTable();
jPanel2 = new javax.swing.JPanel();
dockButton = new javax.swing.JButton();
jSeparator1 = new javax.swing.JSeparator();
checkAllButton = new javax.swing.JButton();
uncheckAllButton = new javax.swing.JButton();
jSeparator2 = new javax.swing.JSeparator();
clearButton = new javax.swing.JButton();
infoPanel = new javax.swing.JPanel();
jLabel1 = new javax.swing.JLabel();
setLayout(new java.awt.GridLayout(1, 0));
jPanel1.setLayout(new java.awt.GridBagLayout());
associationsTable.setAutoCreateRowSorter(true);
associationsTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Title 1", "Title 2", "Title 3", "Title 4"
}
));
jScrollPane2.setViewportView(associationsTable);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
jPanel1.add(jScrollPane2, gridBagConstraints);
jPanel2.setLayout(new java.awt.GridBagLayout());
dockButton.setText("Dock");
dockButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
dockButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
jPanel2.add(dockButton, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
jPanel2.add(jSeparator1, gridBagConstraints);
checkAllButton.setText("Check all");
checkAllButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
checkAllButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 5;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
jPanel2.add(checkAllButton, gridBagConstraints);
uncheckAllButton.setText("Uncheck all");
uncheckAllButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
uncheckAllButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 8;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
jPanel2.add(uncheckAllButton, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
jPanel2.add(jSeparator2, gridBagConstraints);
clearButton.setText("Clear");
clearButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
clearButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 14;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
jPanel2.add(clearButton, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
jPanel1.add(jPanel2, gridBagConstraints);
infoPanel.setBackground(new java.awt.Color(250, 255, 255));
infoPanel.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.LOWERED));
infoPanel.setLayout(new java.awt.GridBagLayout());
jLabel1.setForeground(new java.awt.Color(0, 0, 62));
jLabel1.setText("<html>\nThis tool allows you to find and edit the newly added associations if the data model has been extended after the last change to this extraction model. <br>\nSelect associations here in the table and define restrictions, or check off an association if you're comfortable with the given restriction.\n</html>");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
infoPanel.add(jLabel1, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.gridwidth = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
jPanel1.add(infoPanel, gridBagConstraints);
add(jPanel1);
}// </editor-fold>//GEN-END:initComponents
/** Handles the "Dock" button: toggles between docked and floating state. */
private void dockButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dockButtonActionPerformed
// Docking behavior is supplied by the concrete subclass.
toggleDockingState();
}//GEN-LAST:event_dockButtonActionPerformed
/** Handles the "Check all" button: marks every unchecked border association as decided. */
private void checkAllButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_checkAllButtonActionPerformed
    // Snapshot the still-unchecked associations first: decisionMade()
    // triggers updateView(), which rewrites atBorder, so atBorder must
    // not be iterated while decisions are being made.
    List<Association> pending = new ArrayList<Association>();
    for (Association a : atBorder) {
        if (!checked.contains(a)) {
            pending.add(a);
        }
    }
    for (Association a : pending) {
        decisionMade(a);
    }
}//GEN-LAST:event_checkAllButtonActionPerformed
/** Handles the "Uncheck all" button: undoes the decision for every checked border association. */
private void uncheckAllButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_uncheckAllButtonActionPerformed
    // Collect the affected associations before undoing anything:
    // decisionUndone() mutates both 'checked' and (via updateView())
    // 'atBorder'.
    Set<Association> border = new HashSet<Association>(atBorder);
    List<Association> toUndo = new ArrayList<Association>();
    for (Association a : checked) {
        if (border.contains(a)) {
            toUndo.add(a);
        }
    }
    for (Association a : toUndo) {
        decisionUndone(a);
    }
}//GEN-LAST:event_uncheckAllButtonActionPerformed
/** Handles the "Clear" button: forgets all check marks and refreshes the view. */
private void clearButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_clearButtonActionPerformed
// Note: this only clears the local 'checked' set; pending decisions in
// the data model are untouched, so updateView() re-lists everything.
checked.clear();
updateView();
}//GEN-LAST:event_clearButtonActionPerformed
/**
 * Selects the table row of the given association (or of its reversal,
 * if only that is listed) without triggering the selection listener.
 *
 * @param selected the association to highlight; may be null (no-op)
 */
public void selectAssociation(Association selected) {
if (selected != null) {
int i = atBorder.indexOf(selected);
if (i < 0) {
// The table lists reversal associations; try the reverse direction.
i = atBorder.indexOf(selected.reversalAssociation);
}
if (i >= 0) {
try {
// Suppress the ListSelectionListener while programmatically selecting,
// otherwise select() would be re-invoked.
ignoreSelection = true;
// Convert model index to view index (table may be sorted).
i = associationsTable.getRowSorter().convertRowIndexToView(i);
associationsTable.getSelectionModel().setSelectionInterval(i, i);
} finally {
ignoreSelection = false;
}
}
}
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JTable associationsTable;
protected javax.swing.JButton checkAllButton;
protected javax.swing.JButton clearButton;
protected javax.swing.JButton dockButton;
protected javax.swing.JPanel infoPanel;
private javax.swing.JLabel jLabel1;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JSeparator jSeparator1;
private javax.swing.JSeparator jSeparator2;
protected javax.swing.JButton uncheckAllButton;
// End of variables declaration//GEN-END:variables
}
| src/main/gui/net/sf/jailer/ui/PendingDecisionsPanel.java | /*
* Copyright 2007 - 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.jailer.ui;
import java.awt.Color;
import java.awt.Component;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.swing.DefaultCellEditor;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.RowSorter.SortKey;
import javax.swing.SortOrder;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableColumn;
import javax.swing.table.TableModel;
import javax.swing.table.TableRowSorter;
import net.sf.jailer.datamodel.Association;
import net.sf.jailer.datamodel.DataModel;
import net.sf.jailer.datamodel.Table;
/**
* Pending Decisions Panel.
*
* @author Ralf Wisser
*/
@SuppressWarnings("serial")
public abstract class PendingDecisionsPanel extends javax.swing.JPanel {
// The data model whose pending association decisions are managed here.
private final DataModel dataModel;
// Editor used to resolve the subject closure and to navigate to associations.
private final ExtractionModelEditor extractionModelEditor;
// Associations at the border of the subject closure, in table-row order.
private List<Association> atBorder = new ArrayList<Association>();
// Associations the user has already checked off (decision made).
private Set<Association> checked = new HashSet<Association>();
/**
 * Creates new form PendingDecisionsPanel and wires up the associations
 * table: custom striped renderer, checkbox column, selection handling
 * and row sorting.
 *
 * @param dataModel the data model holding pending decisions
 * @param extractionModelEditor editor used for navigation/closure lookup
 */
public PendingDecisionsPanel(DataModel dataModel, ExtractionModelEditor extractionModelEditor) {
this.dataModel = dataModel;
this.extractionModelEditor = extractionModelEditor;
initComponents();
// Renderer producing alternating row colors; column 4 renders booleans
// as centered check boxes instead of text.
DefaultTableCellRenderer associationsListItemRenderer = new DefaultTableCellRenderer() {
final Color BG_SELCOLOR = new Color(0.45f, 0.85f, 1.0f);
final Color BG1 = new Color(255, 255, 255);
final Color BG2 = new Color(240, 255, 255);
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected,
boolean hasFocus, int row, int column) {
if (value == null) {
value = "";
}
if (column == 4) {
// The "Checked" column never shows selection highlighting.
isSelected = false;
}
Component render = super.getTableCellRendererComponent(table, value, isSelected, false, row, column);
if (value instanceof Boolean) {
JCheckBox checkBox = new JCheckBox(" ");
checkBox.setHorizontalAlignment(SwingConstants.CENTER);
checkBox.setSelected(Boolean.TRUE.equals(value));
checkBox.setOpaque(true);
render = checkBox;
}
// Striped background: selection color, else alternate per row.
render.setBackground(isSelected ? BG_SELCOLOR : (row % 2 == 0) ? BG1 : BG2);
render.setForeground(Color.BLACK);
if (render instanceof JLabel) {
((JLabel) render).setToolTipText(UIUtil.toHTML(String.valueOf(value), 100));
}
return render;
}
private static final long serialVersionUID = -6057505075587930064L;
};
associationsTable.setModel(createAssociationsListModel());
associationsTable.setDefaultRenderer(Object.class, associationsListItemRenderer);
associationsTable.setDefaultRenderer(Boolean.class, associationsListItemRenderer);
associationsTable.getColumnModel().getColumn(4).setCellRenderer(associationsListItemRenderer);
associationsTable.setAutoCreateRowSorter(true);
associationsTable.setShowVerticalLines(false);
associationsTable.setShowHorizontalLines(false);
associationsTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
// Navigate to the association when the user selects a row, unless the
// selection was made programmatically (ignoreSelection guard).
associationsTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
if (associationsTable.getSelectedRow() >= 0) {
if (!ignoreSelection) {
select(atBorder.get(associationsTable.getRowSorter().convertRowIndexToModel(associationsTable.getSelectedRow())));
}
}
}
});
initRowSorter(associationsTable);
updateView();
}
// True while the panel is active (undecided associations exist).
private boolean activated = false;
// Coalesces concurrent updateView() requests into a single EDT task.
private boolean updatePending = false;
// Suppresses the selection listener during programmatic selection.
private boolean ignoreSelection = false;
/**
 * Recomputes the associations at the border of the current subject
 * closure and refreshes the table model, button states and activation
 * state. Work is deferred to the EDT via invokeLater; re-entrant calls
 * while an update is queued are coalesced through updatePending.
 */
void updateView() {
    if (updatePending) {
        return;
    }
    updatePending = true;
    SwingUtilities.invokeLater(new Runnable() {
        @Override
        public void run() {
            try {
                // Remember the current selection so it can be restored
                // after the model has been rebuilt.
                Association selected = null;
                if (associationsTable.getSelectedRow() >= 0) {
                    selected = atBorder.get(associationsTable.getRowSorter().convertRowIndexToModel(associationsTable.getSelectedRow()));
                }
                // Keep only already-checked associations; the pending
                // ones are re-collected from the data model below.
                for (Iterator<Association> i = atBorder.iterator(); i.hasNext(); ) {
                    if (!checked.contains(i.next())) {
                        i.remove();
                    }
                }
                Set<Association> atBorderSet = new HashSet<Association>(atBorder);
                Set<Table> closure = extractionModelEditor.getCurrentSubjectClosure();
                for (String a : dataModel.decisionPending) {
                    Association association = dataModel.namedAssociations.get(a);
                    if (association != null) {
                        if (closure.contains(association.source) || closure.contains(association.destination)) {
                            if (!atBorderSet.contains(association.reversalAssociation)) {
                                atBorder.add(association.reversalAssociation);
                                atBorderSet.add(association.reversalAssociation);
                            }
                        }
                    }
                }
                // Use Collections.sort instead of List.sort: keeps the
                // source compatible with pre-Java-8 toolchains.
                Collections.sort(atBorder, new Comparator<Association>() {
                    @Override
                    public int compare(Association o1, Association o2) {
                        int c = dataModel.getDisplayName(o1.source).compareTo(dataModel.getDisplayName(o2.source));
                        if (c != 0) {
                            return c;
                        }
                        c = dataModel.getDisplayName(o1.destination).compareTo(dataModel.getDisplayName(o2.destination));
                        if (c != 0) {
                            return c;
                        }
                        return o1.getName().compareTo(o2.getName());
                    }
                });
                // Rebuild the model, preserving the user's sort order.
                List<? extends SortKey> sortKeys = new ArrayList<>(associationsTable.getRowSorter().getSortKeys());
                associationsTable.setModel(createAssociationsListModel());
                try {
                    associationsTable.getRowSorter().setSortKeys(sortKeys);
                } catch (Throwable t) {
                    // ignore: old sort keys may not fit the new model
                }
                selectAssociation(selected);
                adjustTableColumnsWidth(associationsTable);
                boolean hasChecked = false;
                for (Association association: atBorder) {
                    if (checked.contains(association)) {
                        hasChecked = true;
                        break;
                    }
                }
                checkAllButton.setEnabled(!checked.containsAll(atBorder));
                uncheckAllButton.setEnabled(hasChecked);
                clearButton.setEnabled(hasChecked);
                // Keep the panel active exactly while undecided
                // associations remain at the border.
                if (checked.containsAll(atBorder)) {
                    if (activated) {
                        deactivate();
                        activated = false;
                    }
                } else {
                    if (!activated) {
                        activate();
                        activated = true;
                    }
                }
            } finally {
                updatePending = false;
            }
        }
    });
}
/**
 * Installs a row sorter that compares model values directly (not their
 * toString() form) and cycles ascending -> descending -> unsorted.
 *
 * @param table the table to equip with the sorter
 */
private void initRowSorter(JTable table) {
    table.setRowSorter(new TableRowSorter<TableModel>(table.getModel()) {
        @Override
        protected boolean useToString(int column) {
            return false;
        }
        @Override
        public void toggleSortOrder(int column) {
            List<? extends SortKey> keys = getSortKeys();
            if (!keys.isEmpty() && keys.get(0).getSortOrder() == SortOrder.DESCENDING) {
                // Third click clears sorting instead of wrapping around.
                setSortKeys(null);
            } else {
                super.toggleSortOrder(column);
            }
        }
    });
}
/**
 * Sets each column's preferred width to fit its header and widest cell,
 * capping columns from index 2 on at 200px and forcing the last column
 * to 120px.
 *
 * @param table the table whose columns are resized
 */
public void adjustTableColumnsWidth(JTable table) {
    DefaultTableModel model = (DefaultTableModel) table.getModel();
    DefaultTableCellRenderer headerRenderer = new DefaultTableCellRenderer();
    int columnCount = table.getColumnCount();
    for (int col = 0; col < columnCount; col++) {
        TableColumn column = table.getColumnModel().getColumn(col);
        // Start from the header's preferred width.
        Component rendered = headerRenderer.getTableCellRendererComponent(table, column.getHeaderValue(),
                false, false, 0, col);
        int width = Math.max(1, rendered.getPreferredSize().width);
        // Widen to the widest rendered cell in this column.
        for (int row = 0; row < table.getRowCount(); row++) {
            rendered = table.getCellRenderer(row, col).getTableCellRendererComponent(table, model.getValueAt(row, col),
                    false, false, row, col);
            width = Math.max(width, rendered.getPreferredSize().width);
        }
        if (col >= 2) {
            width = Math.min(width, 200);
        }
        column.setPreferredWidth(width);
    }
    table.getColumnModel().getColumn(table.getColumnModel().getColumnCount() - 1).setPreferredWidth(120);
}
/**
 * Creates model for associations-list component. Only the "Checked"
 * column (index 4) is editable; toggling it records or undoes the
 * decision for the corresponding association.
 * Side effect: also installs a single-click checkbox editor on
 * associationsTable.
 *
 * @return model for associations-list component
 */
private TableModel createAssociationsListModel() {
DefaultTableModel associationsTableModel = new DefaultTableModel(
new Object[] { "From", "To", "Condition", "Name", "Checked" }, 0) {
@Override
public boolean isCellEditable(int row, int column) {
return column == 4;
}
@Override
public Class<?> getColumnClass(int columnIndex) {
if(columnIndex == 4) {
return Boolean.class;
}
return super.getColumnClass(columnIndex);
}
@Override
public void setValueAt(Object aValue, int row, int column) {
super.setValueAt(aValue, row, column);
// 'row' is a model index; atBorder is kept in model order.
final Association association = atBorder.get(row);
if (Boolean.TRUE.equals(aValue)) {
decisionMade(association);
} else {
decisionUndone(association);
}
}
};
// Single-click checkbox editor for the "Checked" column.
final JCheckBox checkBox = new JCheckBox(" ");
checkBox.setOpaque(true);
checkBox.setHorizontalAlignment(SwingConstants.CENTER);
DefaultCellEditor anEditor = new DefaultCellEditor(checkBox);
anEditor.setClickCountToStart(1);
associationsTable.setDefaultEditor(Boolean.class, anEditor);
for (Association a: atBorder) {
// The table shows the name of the reversal association.
String name = a.reversalAssociation.getName();
associationsTableModel.addRow(
new Object[] {
dataModel.getDisplayName(a.source),
dataModel.getDisplayName(a.destination),
a.getUnrestrictedJoinCondition(),
name,
checked.contains(a) });
}
return associationsTableModel;
}
/**
 * Navigates the extraction model editor to the given association.
 * If it is not selectable from the current root, re-roots the view at
 * the association's destination and retries.
 *
 * @param association the association to navigate to
 */
private void select(Association association) {
    boolean done = extractionModelEditor.select(association);
    if (!done) {
        extractionModelEditor.setRootSelection(association.destination);
        extractionModelEditor.select(association);
    }
}
/**
 * Removes an association from "pendingDecisions" list, marks it as
 * checked, flags the model as dirty and refreshes the view.
 *
 * @param association the association
 */
public void decisionMade(Association association) {
checked.add(association);
// The data model keys pending decisions by the reversal's name.
dataModel.decisionPending.remove(association.reversalAssociation.getName());
// Field, not a recursive call: records that at least one decision was made.
decisionMade = true;
markDirty();
updateView();
}
/**
 * Adds an association to "pendingDecisions" list, un-checks it, flags
 * the model as dirty and refreshes the view.
 *
 * @param association the association
 */
public void decisionUndone(Association association) {
// The data model keys pending decisions by the reversal's name.
dataModel.decisionPending.add(association.reversalAssociation.getName());
checked.remove(association);
markDirty();
updateView();
}
// True once any decision has been made via decisionMade(Association).
protected boolean decisionMade = false;
// Subclass hook: docks or undocks the panel.
protected abstract void toggleDockingState();
// Subclass hook: called when undecided associations appear.
protected abstract void activate();
// Subclass hook: called when no undecided associations remain.
protected abstract void deactivate();
// Subclass hook: marks the extraction model as modified.
protected abstract void markDirty();
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
// <editor-fold defaultstate="collapsed" desc="Generated
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
java.awt.GridBagConstraints gridBagConstraints;
jPanel1 = new javax.swing.JPanel();
jScrollPane2 = new javax.swing.JScrollPane();
associationsTable = new javax.swing.JTable();
jPanel2 = new javax.swing.JPanel();
dockButton = new javax.swing.JButton();
jSeparator1 = new javax.swing.JSeparator();
checkAllButton = new javax.swing.JButton();
uncheckAllButton = new javax.swing.JButton();
jSeparator2 = new javax.swing.JSeparator();
clearButton = new javax.swing.JButton();
infoPanel = new javax.swing.JPanel();
jLabel1 = new javax.swing.JLabel();
setLayout(new java.awt.GridLayout(1, 0));
jPanel1.setLayout(new java.awt.GridBagLayout());
associationsTable.setAutoCreateRowSorter(true);
associationsTable.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
{null, null, null, null},
{null, null, null, null},
{null, null, null, null},
{null, null, null, null}
},
new String [] {
"Title 1", "Title 2", "Title 3", "Title 4"
}
));
jScrollPane2.setViewportView(associationsTable);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
jPanel1.add(jScrollPane2, gridBagConstraints);
jPanel2.setLayout(new java.awt.GridBagLayout());
dockButton.setText("Dock");
dockButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
dockButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
jPanel2.add(dockButton, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
jPanel2.add(jSeparator1, gridBagConstraints);
checkAllButton.setText("Check all");
checkAllButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
checkAllButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 5;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
jPanel2.add(checkAllButton, gridBagConstraints);
uncheckAllButton.setText("Uncheck all");
uncheckAllButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
uncheckAllButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 8;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
jPanel2.add(uncheckAllButton, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
jPanel2.add(jSeparator2, gridBagConstraints);
clearButton.setText("Clear");
clearButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
clearButtonActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 14;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
jPanel2.add(clearButton, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
jPanel1.add(jPanel2, gridBagConstraints);
infoPanel.setBackground(new java.awt.Color(250, 255, 255));
infoPanel.setBorder(new javax.swing.border.SoftBevelBorder(javax.swing.border.BevelBorder.LOWERED));
infoPanel.setLayout(new java.awt.GridBagLayout());
jLabel1.setForeground(new java.awt.Color(0, 0, 62));
jLabel1.setText("<html>\nThis tool allows you to find and edit the newly added associations if the data model has been extended after the last change to this extraction model. <br>\nSelect associations here in the table and define restrictions, or check off an association if you're comfortable with the given restriction.\n</html>");
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
infoPanel.add(jLabel1, gridBagConstraints);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = 0;
gridBagConstraints.gridwidth = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
jPanel1.add(infoPanel, gridBagConstraints);
add(jPanel1);
}// </editor-fold>//GEN-END:initComponents
/** Handles the "Dock" button: toggles between docked and floating state. */
private void dockButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_dockButtonActionPerformed
// Docking behavior is supplied by the concrete subclass.
toggleDockingState();
}//GEN-LAST:event_dockButtonActionPerformed
/** Handles the "Check all" button: marks every unchecked border association as decided. */
private void checkAllButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_checkAllButtonActionPerformed
// Snapshot first: decisionMade() triggers updateView(), which rewrites
// atBorder, so atBorder must not be iterated while making decisions.
List<Association> associations = new ArrayList<Association>();
for (Association association: atBorder) {
if (!checked.contains(association)) {
associations.add(association);
}
}
for (Association association: associations) {
decisionMade(association);
}
}//GEN-LAST:event_checkAllButtonActionPerformed
/** Handles the "Uncheck all" button: undoes the decision for every checked border association. */
private void uncheckAllButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_uncheckAllButtonActionPerformed
// Collect affected associations before undoing anything: decisionUndone()
// mutates both 'checked' and (via updateView()) 'atBorder'.
List<Association> associations = new ArrayList<Association>();
Set<Association> atBorderSet = new HashSet<Association>(atBorder);
for (Association association: checked) {
if (atBorderSet.contains(association)) {
associations.add(association);
}
}
for (Association association: associations) {
decisionUndone(association);
}
}//GEN-LAST:event_uncheckAllButtonActionPerformed
/** Handles the "Clear" button: forgets all check marks and refreshes the view. */
private void clearButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_clearButtonActionPerformed
// Only the local 'checked' set is cleared; pending decisions in the
// data model are untouched, so updateView() re-lists everything.
checked.clear();
updateView();
}//GEN-LAST:event_clearButtonActionPerformed
/**
 * Selects the table row of the given association (or of its reversal,
 * if only that is listed) without triggering the selection listener.
 *
 * @param selected the association to highlight; may be null (no-op)
 */
public void selectAssociation(Association selected) {
if (selected != null) {
int i = atBorder.indexOf(selected);
if (i < 0) {
// The table lists reversal associations; try the reverse direction.
i = atBorder.indexOf(selected.reversalAssociation);
}
if (i >= 0) {
try {
// Suppress the ListSelectionListener during programmatic selection.
ignoreSelection = true;
// Convert model index to view index (table may be sorted).
i = associationsTable.getRowSorter().convertRowIndexToView(i);
associationsTable.getSelectionModel().setSelectionInterval(i, i);
} finally {
ignoreSelection = false;
}
}
}
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JTable associationsTable;
protected javax.swing.JButton checkAllButton;
protected javax.swing.JButton clearButton;
protected javax.swing.JButton dockButton;
protected javax.swing.JPanel infoPanel;
private javax.swing.JLabel jLabel1;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JSeparator jSeparator1;
private javax.swing.JSeparator jSeparator2;
protected javax.swing.JButton uncheckAllButton;
// End of variables declaration//GEN-END:variables
}
| use Collections.sort instead of List.sort | src/main/gui/net/sf/jailer/ui/PendingDecisionsPanel.java | use Collections.sort instead of List.sort |
|
Java | bsd-2-clause | 2ca98f4c8d243fab8f40cbf7e1a0c5b19c8e9a32 | 0 | ghelmer/grading | import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.URI;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Locale;
import java.util.Scanner;
import java.util.Timer;
import java.util.TimerTask;
import java.util.TreeMap;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipException;
import org.apache.tika.exception.TikaException;
import org.xml.sax.SAXException;
/**
* Check and record the results of processing a student's assignment.
*
* @author ghelmer
*
*/
public class AssignmentResults implements Comparable<AssignmentResults>{
// Minimum similarity ratio at which another submission's file is reported.
public final double SUBMISSION_SIMILARITY_THRESHOLD = 0.75;
// Maximum number of error-output characters retained per program run.
public final static int MAX_ERROR_OUTPUT_SIZE = 4096;
// Maximum number of stdout characters included in the report.
public final static int MAX_STD_OUTPUT_SIZE = 4096;
// Student's (login) name.
private String name;
// Student's full display name; falls back to 'name' if unknown.
private String fullName;
// Directory containing this student's submission files.
private File dir;
// Names of Java files copied into the user directory for compilation.
private ArrayList<String> userJavaFiles;
// File name -> source text, for explicitly requested Java files.
private HashMap<String, String> requestedUserJavaFilesContents;
// File name -> extracted text of non-Java submission files.
private HashMap<String, String> otherFilesContents;
// Relative paths of submitted Java files (across submissions).
private ArrayList<String> javaFiles;
// Relative paths of submitted non-Java files.
private ArrayList<String> otherFiles;
// Expected files that were not found in the submission.
private ArrayList<String> missingFiles;
// "thisFile:otherStudent:otherFile" -> similarity score of close matches.
private TreeMap<String, Double> closestOtherSubmittedFiles;
// Captured compiler output, or null if compilation was not attempted.
private String compilationOutput;
// Program name -> captured runtime output.
private HashMap<String,String> programOutputs;
// Earliest "Date Submitted" found in submission.txt files.
private Date firstSubmissionDate;
// Days past the due date (0 if on time).
private double daysLate;
/**
 * Compare two AssignmentResults instances, ordering first by the
 * student's full name and then by user name as a tie-breaker.
 * (An earlier submission-date-based ordering was abandoned; the old
 * javadoc and commented-out code were stale and have been removed.)
 *
 * @param other the other AssignmentResults entry to compare with
 * @return negative, zero, or positive value
 */
public int compareTo(AssignmentResults other)
{
    int fullNameCompare = this.fullName.compareTo(other.fullName);
    if (fullNameCompare != 0)
    {
        return fullNameCompare;
    }
    return this.name.compareTo(other.name);
}
/**
 * Create the object to store all the results of processing a student's
 * assignment submission.
 *
 * @param _name - name of student
 * @param _fullName - full display name of student; null to fall back to _name
 * @param _dir - directory containing student submission files
 */
public AssignmentResults(String _name, String _fullName, File _dir)
{
    name = _name;
    fullName = (_fullName == null) ? _name : _fullName;
    dir = _dir;
    // Initialize all result containers empty; a submission on time has
    // zero days of lateness until findSubmissionDate says otherwise.
    missingFiles = new ArrayList<String>();
    userJavaFiles = new ArrayList<String>();
    programOutputs = new HashMap<String,String>();
    requestedUserJavaFilesContents = new HashMap<String, String>();
    otherFilesContents = new HashMap<String, String>();
    closestOtherSubmittedFiles = new TreeMap<String, Double>();
    daysLate = 0;
}
/** @return the student name supplied at construction. */
public String getName()
{
return name;
}
/**
 * Renders the full grading report for this submission: header with
 * name/date/lateness, suspiciously similar files, contents of non-Java
 * files, Java file lists, compilation output and per-program output
 * (truncated to MAX_STD_OUTPUT_SIZE).
 *
 * @return the report text
 */
public String toString()
{
    String separator = "--------------------------------------------------------------------------------\n";
    // StringBuilder: no synchronization needed for this local buffer.
    StringBuilder r = new StringBuilder();
    r.append("Name: " + name + " (" + fullName + ")\n");
    r.append("Submitted: " + firstSubmissionDate);
    if (daysLate > 0)
    {
        r.append(String.format(" (%.2f days late)", daysLate));
    }
    r.append("\n");
    r.append(separator);
    // Report unusually-similar submitted files.
    if (closestOtherSubmittedFiles.size() > 0)
    {
        r.append("Closest other submitted files found:\n");
        for (String key : closestOtherSubmittedFiles.keySet())
        {
            r.append(String.format("%s: %.2f%%\n", key, closestOtherSubmittedFiles.get(key) * 100.0));
        }
        r.append("\n");
        r.append(separator);
    }
    r.append("Other files found:\n");
    if (otherFiles != null)
    {
        for (String of : otherFiles)
        {
            r.append("--- Contents of " + of + " ---\n");
            r.append(otherFilesContents.get(of));
            r.append("\n--- End Contents of " + of + " ---\n");
        }
    }
    r.append("Java files found:\n");
    if (userJavaFiles != null)
    {
        for (String jf : userJavaFiles)
        {
            r.append("\t" + jf + "\n");
        }
    } else
    {
        r.append("\tNONE\n");
    }
    if (requestedUserJavaFilesContents.size() > 0)
    {
        r.append("Requested contents of Java files:\n");
        for (String key : requestedUserJavaFilesContents.keySet())
        {
            r.append(requestedUserJavaFilesContents.get(key));
        }
    }
    if (compilationOutput != null)
    {
        r.append("Compilation Output:\n-----\n" + compilationOutput + "\n-----\n");
    }
    else
    {
        r.append("*** Did not attempt to compile code. ***\n");
    }
    // Report program outputs in sorted program-name order.
    // (toArray(new String[0]) replaces the old manual Object[] copy loop.)
    String[] programOutputKeys = programOutputs.keySet().toArray(new String[0]);
    Arrays.sort(programOutputKeys);
    for (String pn : programOutputKeys)
    {
        String programOutput = programOutputs.get(pn);
        if (programOutput != null)
        {
            if (programOutput.length() > MAX_STD_OUTPUT_SIZE)
            {
                programOutput = programOutput.substring(0, MAX_STD_OUTPUT_SIZE);
            }
            r.append(String.format("Output from program %s:\n", pn));
            r.append(programOutput);
        }
        else
        {
            r.append(String.format("No output from program %s\n", pn));
        }
    }
    return r.toString();
}
/**
 * Get the contents of the given file as a String.
 * Each line is terminated with '\n' regardless of the file's own
 * line-ending convention.
 *
 * @param f the file to read
 * @return the file's text
 * @throws FileNotFoundException if the file cannot be opened
 */
public String getFileAsString(File f) throws FileNotFoundException
{
    StringBuilder sb = new StringBuilder();
    // try-with-resources: the old code leaked the Scanner if reading threw.
    try (Scanner in = new Scanner(f))
    {
        while (in.hasNextLine())
        {
            sb.append(in.nextLine());
            sb.append('\n');
        }
    }
    return sb.toString();
}
/**
 * Return the text from a document in the submission.
 * Plain .txt files are read directly; anything else is converted via
 * Tika (ExtractTextFromFile) through a temporary file.
 *
 * @param docFilename submission-relative file name
 * @return for converted files: content-type info, "::\n", then the text
 * @throws TikaException
 * @throws SAXException
 */
public String getDocumentText(String docFilename) throws IOException, SAXException, TikaException
{
    File inputFile = new File(dir.getAbsolutePath() + File.separator + docFilename);
    if (docFilename.endsWith(".txt"))
    {
        return getFileAsString(inputFile);
    }
    /* Otherwise: Use CleanContent to get text from file. */
    File outputFile = File.createTempFile(inputFile.getName(), ".out", inputFile.getParentFile());
    try
    {
        String about = ExtractTextFromFile.getText(inputFile, outputFile);
        String s = getFileAsString(outputFile);
        return about + "::\n" + s;
    }
    finally
    {
        // Always remove the temp file; the old code leaked it when
        // extraction or reading threw.
        outputFile.delete();
    }
}
/**
 * Get the text for the other (non-Java) files submitted by the student
 * and cache it in otherFilesContents. Extraction failures are recorded
 * as the formatted stack trace instead of the file's text, so the
 * report still shows what went wrong.
 */
public void readOtherFilesContents()
{
    for (String of : otherFiles)
    {
        String contents;
        try
        {
            contents = getDocumentText(of);
        }
        catch (IOException e)
        {
            contents = "Exception: " + formatStackTrace(e);
        }
        catch (SAXException e)
        {
            contents = "SAXException: " + formatStackTrace(e);
        }
        catch (TikaException e)
        {
            contents = "TikaException: " + formatStackTrace(e);
        }
        otherFilesContents.put(of, contents);
    }
}

/**
 * Render an exception's stack trace in the format used by the report:
 * an "Exception:" header line followed by one tab-indented frame per
 * line. (Replaces three identical inline copies of this loop.)
 *
 * @param e the exception to format
 * @return the formatted stack trace text
 */
private String formatStackTrace(Throwable e)
{
    StringBuilder sb = new StringBuilder();
    sb.append("Exception:\n");
    for (StackTraceElement st : e.getStackTrace())
    {
        sb.append('\t');
        sb.append(st.toString());
        sb.append('\n');
    }
    return sb.toString();
}
/**
 * Compare this submission's text file contents with the other submission.
 * Files named *submission.txt and database.properties are excluded on
 * both sides.
 *
 * @param other the other submission to compare against
 * @param threshold minimum similarity ratio for a match to be reported
 * @return TreeMap of closest match (may contain more than one if same score),
 *         keyed "thisFile:otherStudent:otherFile"
 */
public TreeMap<String, Double> findClosestMatches(AssignmentResults other, double threshold)
{
    TreeMap<String, Double> similarFiles = new TreeMap<>();
    for (String thisKey : this.otherFilesContents.keySet())
    {
        if (!thisKey.endsWith("submission.txt") &&
                !thisKey.equals("database.properties"))
        {
            double bestMatch = 0;
            ArrayList<String> matches = new ArrayList<String>();
            for (String otherKey : other.otherFilesContents.keySet())
            {
                // BUG FIX: was "!thisKey.equals(...)" (copy-paste), which
                // never excluded the OTHER submission's database.properties.
                if (!otherKey.endsWith("submission.txt") &&
                        !otherKey.equals("database.properties"))
                {
                    double thisMatch = StringSimilarity.similarity
                            (otherFilesContents.get(thisKey), other.otherFilesContents.get(otherKey));
                    if (thisMatch > bestMatch)
                    {
                        // New best score: discard lower-scoring candidates.
                        matches.clear();
                        matches.add(thisKey + ":" + other.name + ":" + otherKey);
                        bestMatch = thisMatch;
                    }
                    else if (thisMatch == bestMatch)
                    {
                        matches.add(thisKey + ":" + other.name + ":" + otherKey);
                    }
                }
            }
            if (bestMatch >= threshold)
            {
                for (String s : matches)
                {
                    similarFiles.put(s, bestMatch);
                }
            }
        }
    }
    return similarFiles;
}
/**
 * Find the closest submitted files to this assignment's submission,
 * raising the similarity threshold as better matches are found and
 * recording results in closestOtherSubmittedFiles.
 *
 * @param assignments all submissions to compare against (including this one)
 */
public void findClosestMatchesInAllSubmissions(ArrayList<AssignmentResults> assignments)
{
double closestMatch = SUBMISSION_SIMILARITY_THRESHOLD;
for (AssignmentResults other : assignments)
{
// Don't check an assignment's own submitted files.
if (this != other)
{
TreeMap<String, Double> closestMatches = findClosestMatches(other, closestMatch);
// NOTE(review): firstEntry() is the alphabetically-first KEY, not the
// highest score; the map may hold different scores per file, so this
// may compare against an arbitrary score — verify intent.
if (closestMatches.size() > 0 &&
closestMatches.firstEntry().getValue() > closestMatch)
{
// Discard lower-scoring files.
closestOtherSubmittedFiles.clear();
closestMatch = closestMatches.firstEntry().getValue();
}
for (String key : closestMatches.keySet())
{
closestOtherSubmittedFiles.put(key, closestMatches.get(key));
}
}
}
}
/**
 * In the user directory, collect the Java files in the submissions
 * (ordered by date, i.e. sorted path name) in the user's directory for
 * compilation and execution. Later submissions overwrite earlier copies
 * of the same file name.
 *
 * @throws IOException if a file copy fails
 */
public void copyJavaFilesToUser() throws IOException
{
    // Sorted order makes later submissions (later paths) win on conflict.
    Collections.sort(javaFiles);
    for (String jFile : javaFiles)
    {
        /* Copy to user directory. */
        File srcFile = new File(dir.getAbsolutePath() + File.separator + jFile);
        File destFile = new File(dir.getAbsolutePath() + File.separator + srcFile.getName());
        CopyFile.copy(srcFile, destFile);
        // List.contains replaces the old manual linear search; track each
        // base file name once.
        if (!userJavaFiles.contains(destFile.getName()))
        {
            userJavaFiles.add(destFile.getName());
        }
    }
}
/**
 * Find earliest date of submission in the files.
 * Scans every file named submission.txt for a BlackBoard
 * "Date Submitted:" line. If submission date is after the due date,
 * compute the number of days late.
 *
 * @param dueDate the assignment's due date
 * @throws IOException if a submission file cannot be read
 */
public void findSubmissionDate(Date dueDate) throws IOException
{
/* Date Submitted:Friday, October 21, 2011 4:22:36 PM CDT */
/* Date Submitted: Friday, February 28, 2014 12:46:42 AM CST */
// Group 1 captures "Month day, year h:m:s AMPM TZ" (weekday dropped).
Pattern dsPattern = Pattern.compile("^Date Submitted:\\s*\\S+, (\\S+ \\d+, \\d+ \\d+:\\d+:\\d+ \\S+ \\S+)$");
DateFormat df = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG, Locale.US);
// Work on a copy so 'otherFiles' keeps its original order.
@SuppressWarnings("unchecked")
ArrayList<String> otherFilesCopy = (ArrayList<String>)otherFiles.clone();
Collections.sort(otherFilesCopy);
for (String oFn : otherFilesCopy)
{
/* Build absolute pathname. */
File oFile = new File(dir.getAbsolutePath() + File.separator + oFn);
if (oFile.getName().equalsIgnoreCase("submission.txt"))
{
/* Open and read file until Date Submitted line is found. */
Scanner in = new Scanner(oFile);
boolean found = false;
while (in.hasNextLine() && !found)
{
String line = in.nextLine();
Matcher m = dsPattern.matcher(line);
if (m.matches())
{
found = true;
String dateString = m.group(1);
try
{
Date parsedDate = df.parse(dateString);
// Keep the earliest date seen across all submission.txt files.
if (firstSubmissionDate == null || parsedDate.compareTo(firstSubmissionDate) < 0)
{
firstSubmissionDate = parsedDate;
}
}
catch (ParseException pe)
{
pe.printStackTrace();
}
}
}
in.close();
}
}
if (firstSubmissionDate == null)
{
// No date found: fall back to a sentinel far in the past.
try
{
firstSubmissionDate = df.parse("January 1, 2000 00:00:00 AM CST");
}
catch (ParseException pe)
{
pe.printStackTrace();
}
}
else
{
if (firstSubmissionDate.after(dueDate))
{
// Fractional days late (milliseconds per day divisor).
daysLate = (firstSubmissionDate.getTime() - dueDate.getTime()) /
(double)(24 * 60 * 60 * 1000);
}
}
}
/**
* Strip the "package ...;" statement, if any, from Java files in the user's
* directory.
*/
public void stripPackageFromJavaFiles() throws IOException
{
for (String jFile : userJavaFiles)
{
File srcFile = new File(dir.getAbsolutePath() + File.separator + jFile);
File destFile = new File(dir.getAbsolutePath() + File.separator + jFile + ".new");
Scanner in = new Scanner(srcFile);
PrintWriter out = new PrintWriter(destFile);
boolean replace = false;
while (in.hasNextLine())
{
String s = in.nextLine();
if (!s.startsWith("package "))
{
out.println(s);
}
else
{
replace = true;
}
}
in.close();
out.close();
if (replace)
{
long modTime = srcFile.lastModified();
if (modTime != 0L)
{
destFile.setLastModified(modTime);
}
destFile.renameTo(srcFile);
}
else
{
destFile.delete();
}
}
}
/**
* Organize BlackBoard download.
* For each file in the directory, if it is in the form
* Homework201_njvang_attempt_2011-09-02-20-09-43_BankAccount.java
* then move the file into a directory <user>/<date>/<file>.
*
*/
public static void organizeBlackBoardFiles(File dir) throws IOException
{
if (!dir.isDirectory()) {
throw new IllegalArgumentException(dir.getName() + " is not a directory");
}
/* First step: organize files into per-user / per-submission directories. */
File[] allFiles = dir.listFiles();
Pattern hwFilePattern = Pattern.compile("[A-Z][ a-zA-Z0-9]*_(.*)_attempt_([0-9-]*)[_.](.*)$");
for (File d : allFiles)
{
Matcher m = hwFilePattern.matcher(d.getName());
if (m.matches())
{
String user = m.group(1);
String date = m.group(2);
String fn = m.group(3);
if (fn.equals("txt"))
{
fn = "submission.txt";
}
File userDir = new File(dir.getAbsolutePath() + File.separator + user);
if (!userDir.isDirectory())
{
userDir.mkdir();
}
File submissionDir = new File(dir.getAbsolutePath() + File.separator + user + File.separator + date);
if (!submissionDir.isDirectory())
{
submissionDir.mkdir();
}
File destFile = new File(submissionDir.getAbsolutePath() + File.separator + fn);
d.renameTo(destFile);
}
}
}
/**
* Find the files in the directory.
* Build a list of the files.
*
* @param dir - Directory to examine
* @return true if successful
* @throws IOException
*/
public void findFiles(File dir) throws IOException
{
ArrayList<String> jfs = new ArrayList<String>();
ArrayList<String> ofs = new ArrayList<String>();
if (!dir.isDirectory()) {
throw new IllegalArgumentException(dir.getName() + " is not a directory");
}
ArrayList<String> allFiles = new ArrayList<String>();
findFiles(dir, "", allFiles);
for (String e : allFiles)
{
File f = new File (e);
if (f.getName().startsWith("."))
{
/* Ignore names starting with '.'. */
continue;
}
if (e.endsWith(".classpath"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".DS_Store"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".prefs"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".class"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".jar"))
{
/* Ignore. */
continue;
}
else if (e.endsWith(".java"))
{
jfs.add(e);
}
else
{
ofs.add(e);
}
}
javaFiles = jfs;
otherFiles = ofs;
}
	/**
	 * Recursively collect all file names under dir, prefixing each with
	 * its relative path from the top-level directory.  Zip archives are
	 * expanded in place (via Unzip) and their contents added.  Java files
	 * sitting directly in the student's root directory are skipped: they
	 * belong inside dated submission subdirectories.
	 *
	 * @param dir - Directory to examine
	 * @param subdirName - relative pathname for this directory ("" at the top)
	 * @param foundFiles - output list of relative pathnames found
	 * @throws IOException if a zip archive cannot be read
	 */
	public void findFiles(File dir, String subdirName, ArrayList<String> foundFiles) throws IOException
	{
		if (!dir.isDirectory()) {
			throw new IllegalArgumentException(subdirName + dir.getName() + " is not a directory");
		}
		for (File e : dir.listFiles())
		{
			if (e.getName().startsWith("."))
			{
				/* Ignore names starting with '.'. */
				continue;
			}
			if (e.isDirectory())
			{
				/*
				 * Enter the subdirectory and find its files.
				 */
				findFiles(e, subdirName + e.getName() + File.separator, foundFiles);
			}
			else if (e.isFile())
			{
				if (e.getName().endsWith(".zip"))
				{
					/*
					 * Extract and add files to list.  A corrupt archive is
					 * reported but does not abort the scan.
					 */
					try
					{
						ArrayList<String> unzippedFiles = Unzip.unzip(e);
						for (String s : unzippedFiles)
						{
							foundFiles.add(subdirName + s);
						}
					}
					catch (ZipException ze)
					{
						ze.printStackTrace();
					}
				}
				else
				{
					if (subdirName.length() == 0 && e.getName().endsWith(".java"))
					{
						// Root-level Java files are deliberately excluded.
						System.err.println("Skipping Java file " + subdirName + e.getName() + " in student " + name + " root directory");
					}
					else
					{
						foundFiles.add(subdirName + e.getName());
					}
				}
			}
			else
			{
				// Neither file nor directory (e.g. broken symlink) — report only.
				System.err.println(subdirName + e.getName() + " is not a directory or a file");
			}
		}
	}
/**
* Check the list of java files found in the student's directory.
* Set any missing java files in the missingJavaFiles list.
*
* @param foundFiles - List of Java files found
* @param missingFiles - List of required files that were not found
* @return true if all required files were found, or false otherwise.
*/
public boolean checkRequiredJavaFiles(ProgramInfo[] programs)
{
boolean result = true;
for (ProgramInfo pi : programs)
{
for (AssignmentClasses clss : pi.getClasses())
{
String className = clss.getClassName() + ".java";
boolean fileFound = false;
for (int i = 0; i < userJavaFiles.size() && !fileFound; i++)
{
String foundFile = userJavaFiles.get(i);
int lastSlash = foundFile.lastIndexOf(File.separator);
if (lastSlash != -1)
{
foundFile = foundFile.substring(lastSlash + 1);
}
if (foundFile.equals(className))
{
fileFound = true;
}
}
if (!fileFound)
{
missingFiles.add(className);
result = false;
}
}
}
return result;
}
/**
* Obtain the contents of specified java files copied to the student's root
* directory for inclusion in the report.
*
* @param programs - array of programs classes
*/
public void showRequestedJavaFiles(ProgramInfo[] programs)
{
for (ProgramInfo pi : programs)
{
for (AssignmentClasses clss : pi.getClasses())
{
if (clss.showClass())
{
StringBuffer r = new StringBuffer();
String className = clss.getClassName() + ".java";
String foundFilename = null;
for (int i = 0; i < userJavaFiles.size() && foundFilename == null; i++)
{
String foundFile = userJavaFiles.get(i);
int lastSlash = foundFile.lastIndexOf(File.separator);
if (lastSlash != -1)
{
foundFile = foundFile.substring(lastSlash + 1);
}
if (foundFile.equals(className))
{
foundFilename = dir.getAbsolutePath() + File.separator + foundFile;
}
}
if (foundFilename == null)
{
r.append("---- Java file " + className + " NOT FOUND! ----\n");
}
else
{
r.append("---- Java file " + className + " ----\n");
try
{
r.append(getFileAsString(new File(foundFilename)));
}
catch (IOException e)
{
r.append("Exception " + e.getMessage() + " while reading file " + foundFilename + "\n");
StackTraceElement[] sts = e.getStackTrace();
for (StackTraceElement st : sts)
{
r.append('\t');
r.append(st.toString());
r.append('\n');
}
}
r.append("\n---- End of Java file " + foundFilename + " ----\n");
}
requestedUserJavaFilesContents.put(className, r.toString());
}
}
}
}
/**
* Compile the Java source files under the given directory.
* Return a string describing the results.
*
* @param dir - working directory
* @param sourceFiles - Array of files to compile
* @return status
*/
public boolean compileJavaFiles(File dir) throws IOException, InterruptedException
{
StringBuffer output = new StringBuffer();
if (userJavaFiles.size() == 0)
return true;
Runtime r = Runtime.getRuntime();
String[] cmd = new String[userJavaFiles.size() + 1];
cmd[0] = "javac";
for (int i = 1; i <= userJavaFiles.size(); i++)
cmd[i] = userJavaFiles.get(i - 1);
Process result = r.exec(cmd, null, dir);
Scanner in = new Scanner(result.getErrorStream());
while (in.hasNextLine())
{
//System.out.println("Output from javac: " + in.nextLine());
output.append(in.nextLine() + "\n");
}
in.close();
result.waitFor();
if (output.length() != 0)
{
compilationOutput = output.toString();
return false;
}
else if (result.exitValue() != 0)
{
compilationOutput = "Compiler exit code: " + result.exitValue();
return false;
}
compilationOutput = "OK";
return true;
}
	/**
	 * Run each of the programs that should have been submitted.
	 * For every program and run configuration a java child process is
	 * launched in dir with optional classpath, security policy, and -D
	 * properties; stdin/stdout/stderr are pumped by StreamConnector
	 * threads; a watchdog timer destroys the child after 15 seconds; and
	 * the collected output is stored in programOutputs under the key
	 * "<program>.<runName>".
	 *
	 * @param programs - Programs and run configurations
	 * @param dir - working directory in which to run the programs
	 * @throws IOException if a run's input file cannot be opened
	 * @throws InterruptedException if interrupted while waiting for a run
	 */
	public void runJavaPrograms(ProgramInfo[] programs, File dir) throws IOException, InterruptedException
	{
		Runtime r = Runtime.getRuntime();
		for (ProgramInfo pi : programs)
		{
			String program = pi.getName();
			String classpath = pi.getClasspath();
			URI securityPolicyURI = null;
			String securityPolicyFileStr = pi.getSecurityPolicyFile();
			if (securityPolicyFileStr != null)
			{
				File securityPolicyFile = new File(securityPolicyFileStr);
				securityPolicyURI = securityPolicyFile.toURI();
			}
			for (RunConfiguration rc : pi.getRunConfigurations())
			{
				StringBuffer output = new StringBuffer();
				String[] args = rc.getArguments();
				String[] properties = rc.getProperties();
				// Compute the exact java command-line length up front.
				int maxArgs = args.length + 2;
				if (classpath != null)
				{
					maxArgs += 2;
				}
				if (securityPolicyURI != null)
				{
					maxArgs += 3;
				}
				if (properties != null)
				{
					maxArgs += properties.length;
				}
				int numArgs = 0;
				String[] cmd = new String[maxArgs];
				cmd[numArgs++] = "java";
				if (classpath != null)
				{
					cmd[numArgs++] = "-classpath";
					cmd[numArgs++] = classpath;
				}
				if (securityPolicyURI != null)
				{
					// Set parameter grading.base to the directory containing the directory
					// that contains the homework submissions.
					cmd[numArgs++] = "-Dgrading.base=" + dir.getParentFile().getParent();
					// System.out.println("grading.base=" + cmd[numArgs - 1]);
					cmd[numArgs++] = "-Djava.security.manager";
					// The "==" form makes this policy file the only one in effect.
					cmd[numArgs++] = "-Djava.security.policy==" + securityPolicyURI.toString();
				}
				if (properties != null)
				{
					// Add any properties specified in the RunConfiguration
					// to the command line.
					for (String p : properties)
					{
						cmd[numArgs++] = String.format("-D%s", p);
					}
				}
				cmd[numArgs++] = program;
				System.arraycopy(args, 0, cmd, numArgs, args.length);
				numArgs += args.length;
				System.out.println("Running " + name + " program " + program);
				try
				{
					/* Setup input for the program, if any. */
					BufferedInputStream programInput = null;
					FileInputStream inputStream = rc.openInputFile(dir.getAbsolutePath());
					if (inputStream != null)
					{
						programInput = new BufferedInputStream(inputStream);
					}
					StreamConnector stdinConnector = null;
					/*
					 * Setup output for the program, either to an output file or
					 * memory buffer.
					 */
					OutputStream outputStream = rc.openOutputFile(dir.getAbsolutePath());
					ByteArrayOutputStream storedOutputStream = null;
					if (outputStream == null)
					{
						/* Collect output in a ByteArrayOutputStream. */
						storedOutputStream = new ByteArrayOutputStream(MAX_STD_OUTPUT_SIZE);
						outputStream = storedOutputStream;
					}
					BufferedOutputStream programOutput = new BufferedOutputStream(outputStream);
					StreamConnector stdoutConnector = null;
					/* Setup error output for the program. */
					ByteArrayOutputStream storedErrorStream = new ByteArrayOutputStream(MAX_ERROR_OUTPUT_SIZE);
					BufferedOutputStream programError = new BufferedOutputStream(storedErrorStream);
					StreamConnector stderrConnector = null;
					try
					{
						final Process process = r.exec(cmd, null, dir);
						/*
						 * Connect and start the threads to copy stdin, stdout,
						 * and stderr for the child process.
						 */
						if (programInput != null)
						{
							stdinConnector = new StreamConnector(programInput, process.getOutputStream(), false, true, "StdIn");
							stdinConnector.start();
						}
						stdoutConnector = new StreamConnector(process.getInputStream(), programOutput, "StdOut");
						stdoutConnector.start();
						stderrConnector = new StreamConnector(process.getErrorStream(), programError, "StdErr");
						stderrConnector.start();
						/*
						 * Setup a timer to expire and terminate the child
						 * process after a reasonable delay.
						 */
						Timer t = new Timer();
						t.schedule(new TimerTask() {
							@Override
							public void run() {
								process.destroy();
							}
						}, 15000); // it will kill the process after 15 seconds (if it's not finished yet).
						process.waitFor();
						t.cancel();
						// Wait for the pump threads so all output has been captured.
						stdoutConnector.join();
						stderrConnector.join();
						/* Read stdout and stderr into output arraylist. */
						if (storedOutputStream != null)
						{
							if (storedOutputStream.size() == 0)
							{
								output.append(String.format("----- No output from %s java %s run %s ------\n", name, program, rc.getName()));
							}
							else
							{
								output.append(String.format("----- Start of output from %s java %s run %s ------\n", name, program, rc.getName()));
								ByteArrayInputStream conv = new ByteArrayInputStream(storedOutputStream.toByteArray());
								Scanner in = new Scanner(conv);
								while (in.hasNextLine())
								{
									output.append(in.nextLine());
									output.append('\n');
								}
								in.close();
								output.append(String.format("----- End of output from %s java %s run %s ------\n", name, program, rc.getName()));
							}
						}
						// NOTE(review): storedErrorStream is always non-null here;
						// the null check is retained unchanged for safety.
						if (storedErrorStream != null)
						{
							if (storedErrorStream.size() != 0)
							{
								output.append(String.format("----- Start of error output from %s java %s run %s ------\n", name, program, rc.getName()));
								ByteArrayInputStream conv = new ByteArrayInputStream(storedErrorStream.toByteArray());
								Scanner errIn = new Scanner(conv);
								while (errIn.hasNextLine())
								{
									output.append(errIn.nextLine());
									output.append('\n');
								}
								errIn.close();
								output.append(String.format("----- End of error output from %s java %s run %s ------\n", name, program, rc.getName()));
							}
						}
						if (process.exitValue() != 0)
						{
							output.append(String.format("*** %s java %s run %s exit code %d\n", name, program, rc.getName(), process.exitValue()));
						}
						programOutputs.put(program + '.' + rc.getName(), output.toString());
					}
					finally
					{
						if (programInput != null)
						{
							programInput.close();
						}
						if (programOutput != null)
						{
							programOutput.close();
						}
					}
				}
				catch (IOException e)
				{
					programOutputs.put(program + '.' + rc.getName(), "IOException: " + e.getMessage());
				}
				// NOTE(review): the interrupt status is not restored here —
				// consider Thread.currentThread().interrupt(); confirm intent.
				catch (InterruptedException e)
				{
					programOutputs.put(program + '.' + rc.getName(), "InterruptedException: " + e.getMessage());
				}
			}
		}
	}
public int hashCode()
{
return name.hashCode();
}
public boolean equals(AssignmentResults ar)
{
return name.equals(ar.name);
}
}
| src/AssignmentResults.java | import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.net.URI;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Locale;
import java.util.Scanner;
import java.util.Timer;
import java.util.TimerTask;
import java.util.TreeMap;
import java.util.HashMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipException;
import org.apache.tika.exception.TikaException;
import org.xml.sax.SAXException;
/**
* Check and record the results of processing a student's assignment.
*
* @author ghelmer
*
*/
public class AssignmentResults implements Comparable<AssignmentResults>{
	// Minimum similarity score for flagging a pair of submitted files as suspiciously close.
	public final double SUBMISSION_SIMILARITY_THRESHOLD = 0.75;
	// Capacity used when buffering a child process's stderr in memory.
	public final static int MAX_ERROR_OUTPUT_SIZE = 4096;
	// Capacity/truncation limit for a child process's stdout in reports.
	public final static int MAX_STD_OUTPUT_SIZE = 4096;
	// Student's user name (also the submission directory name).
	private String name;
	// Student's display name; falls back to name when unknown.
	private String fullName;
	// Root directory holding this student's submissions.
	private File dir;
	// Base names of Java files copied into the user's root directory.
	private ArrayList<String> userJavaFiles;
	// className.java -> file contents (or error text) requested for the report.
	private HashMap<String, String> requestedUserJavaFilesContents;
	// relative path -> extracted text of each non-Java submitted file.
	private HashMap<String, String> otherFilesContents;
	// Relative paths of all Java files found in submission subdirectories.
	private ArrayList<String> javaFiles;
	// Relative paths of all non-Java files found.
	private ArrayList<String> otherFiles;
	// Required class files that were not found.
	private ArrayList<String> missingFiles;
	// "thisFile:otherStudent:otherFile" -> similarity score of closest matches.
	private TreeMap<String, Double> closestOtherSubmittedFiles;
	// javac diagnostics, "OK", or null if compilation was not attempted.
	private String compilationOutput;
	// "<program>.<runName>" -> captured run output.
	private HashMap<String,String> programOutputs;
	// Earliest parsed submission date (sentinel Jan 1, 2000 when unknown).
	private Date firstSubmissionDate;
	// Fractional days past the due date; 0 when on time.
	private double daysLate;
/**
* Compare two AssignmentResults instances based first on
* submission date and then on student name.
* @param other the other AssignmentResults entry to compare with
* @return negative, 0, or positive value
*/
public int compareTo(AssignmentResults other)
{
/*
int dateCompare = this.firstSubmissionDate.compareTo(other.firstSubmissionDate);
if (dateCompare != 0)
{
return dateCompare;
}
return this.name.compareTo(other.name);
}
*/
{
int fullNameCompare = this.fullName.compareTo(other.fullName);
if (fullNameCompare != 0)
{
return fullNameCompare;
}
return this.name.compareTo(other.name);
}
}
	/**
	 * Create the object to store all the results of
	 * processing a student's assignment submission.
	 * @param _name - Name of student
	 * @param _fullName - display name of student; may be null, in which
	 *        case _name is used
	 * @param _dir - directory containing student submission files
	 */
	public AssignmentResults(String _name, String _fullName, File _dir)
	{
		name = _name;
		if (_fullName == null)
		{
			// No separate display name known; reuse the user name.
			fullName = _name;
		}
		else
		{
			fullName = _fullName;
		}
		dir = _dir;
		missingFiles = new ArrayList<String>();
		programOutputs = new HashMap<String,String>();
		userJavaFiles = new ArrayList<String>();
		requestedUserJavaFilesContents = new HashMap<String, String>();
		otherFilesContents = new HashMap<String, String>();
		closestOtherSubmittedFiles = new TreeMap<String, Double>();
		daysLate = 0;
	}
public String getName()
{
return name;
}
public String toString()
{
String separator = "--------------------------------------------------------------------------------\n";
StringBuffer r = new StringBuffer();
r.append("Name: " + name + " (" + fullName + ")\n");
r.append("Submitted: " + firstSubmissionDate);
if (daysLate > 0)
{
r.append(String.format(" (%.2f days late)", daysLate));
}
r.append("\n");
r.append(separator);
// Report unusually-similar submitted files.
if (closestOtherSubmittedFiles.size() > 0)
{
r.append("Closest other submitted files found:\n");
for (String key : closestOtherSubmittedFiles.keySet())
{
r.append(String.format("%s: %.2f%%\n", key, closestOtherSubmittedFiles.get(key) * 100.0));
}
r.append("\n");
r.append(separator);
}
r.append("Other files found:\n");
if (otherFiles != null)
{
for (String of : otherFiles)
{
r.append("--- Contents of " + of + " ---\n");
r.append(otherFilesContents.get(of));
r.append("\n--- End Contents of " + of + " ---\n");
}
}
r.append("Java files found:\n");
if (userJavaFiles != null)
{
for (String jf : userJavaFiles)
{
r.append("\t" + jf + "\n");
}
} else
{
r.append("\tNONE\n");
}
if (requestedUserJavaFilesContents.size() > 0)
{
r.append("Requested contents of Java files:\n");
for (String key : requestedUserJavaFilesContents.keySet())
{
r.append(requestedUserJavaFilesContents.get(key));
}
}
if (compilationOutput != null)
{
r.append("Compilation Output:\n-----\n" + compilationOutput + "\n-----\n");
}
else
{
r.append("*** Did not attempt to compile code. ***\n");
}
Object[] programOutputKeysObjects = programOutputs.keySet().toArray();
String[] programOutputKeys = new String[programOutputKeysObjects.length];
for (int i = 0; i < programOutputKeys.length; i++)
{
programOutputKeys[i] = (String) programOutputKeysObjects[i];
}
Arrays.sort(programOutputKeys);
for (Object _pn : programOutputKeys)
{
String pn = (String)_pn;
String programOutput = programOutputs.get(pn);
if (programOutput != null)
{
if (programOutput.length() > MAX_STD_OUTPUT_SIZE)
{
programOutput = programOutput.substring(0, MAX_STD_OUTPUT_SIZE);
}
r.append(String.format("Output from program %s:\n", pn));
r.append(programOutput);
}
else
{
r.append(String.format("No output from program %s\n", pn));
}
}
return r.toString();
}
/**
* Get the contents of the given file as a String.
*/
public String getFileAsString(File f) throws FileNotFoundException
{
StringBuffer sb = new StringBuffer();
Scanner in = new Scanner(f);
while (in.hasNextLine())
{
sb.append(in.nextLine());
sb.append('\n');
}
in.close();
return sb.toString();
}
	/**
	 * Return the text from a document in the submission.  Plain .txt
	 * files are read directly; everything else goes through
	 * ExtractTextFromFile via a temporary output file, and the returned
	 * string is the extractor's description followed by "::\n" and the
	 * extracted text.
	 *
	 * @param docFilename path of the document, relative to the student's directory
	 * @return the document's text
	 * @throws IOException if the file cannot be read
	 * @throws TikaException if text extraction fails
	 * @throws SAXException if text extraction fails
	 */
	public String getDocumentText(String docFilename) throws IOException, SAXException, TikaException
	{
		File inputFile = new File(dir.getAbsolutePath() + File.separator + docFilename);
		if (docFilename.endsWith(".txt"))
		{
			return getFileAsString(inputFile);
		}
		/* Otherwise: Use CleanContent to get text from file. */
		File outputFile = File.createTempFile(inputFile.getName(), ".out", inputFile.getParentFile());
		String about = ExtractTextFromFile.getText(inputFile, outputFile);
		String s = getFileAsString(outputFile);
		// The temp file has been read into memory; remove it.
		outputFile.delete();
		return about + "::\n" + s;
	}
/**
* Get the text for the other files submitted by student.
*/
public void readOtherFilesContents()
{
for (String of : otherFiles)
{
String contents;
try
{
contents = getDocumentText(of);
}
catch (IOException e)
{
StackTraceElement[] sts = e.getStackTrace();
StringBuffer sb = new StringBuffer();
sb.append("Exception:\n");
for (StackTraceElement st : sts)
{
sb.append('\t');
sb.append(st.toString());
sb.append('\n');
}
contents = "Exception: " + sb.toString();
} catch (SAXException e) {
StackTraceElement[] sts = e.getStackTrace();
StringBuffer sb = new StringBuffer();
sb.append("Exception:\n");
for (StackTraceElement st : sts)
{
sb.append('\t');
sb.append(st.toString());
sb.append('\n');
}
contents = "SAXException: " + sb.toString();
} catch (TikaException e) {
StackTraceElement[] sts = e.getStackTrace();
StringBuffer sb = new StringBuffer();
sb.append("Exception:\n");
for (StackTraceElement st : sts)
{
sb.append('\t');
sb.append(st.toString());
sb.append('\n');
}
contents = "TikaException: " + sb.toString();
}
otherFilesContents.put(of, contents);
}
}
	/**
	 * Compare this submission's text file contents with the other
	 * submission's, file by file (submission.txt is excluded on both
	 * sides).  For each of this student's files, the best-scoring match
	 * against the other student's files is kept; ties all survive.
	 *
	 * @param other the other student's results to compare against
	 * @param threshold minimum similarity score for a match to be reported
	 * @return TreeMap of "thisFile:otherName:otherFile" -> score for
	 *         matches at or above threshold (may contain several entries
	 *         per file when scores tie)
	 */
	public TreeMap<String, Double> findClosestMatches(AssignmentResults other, double threshold)
	{
		TreeMap<String, Double> similarFiles = new TreeMap<>();
		for (String thisKey : this.otherFilesContents.keySet())
		{
			if (!thisKey.endsWith("submission.txt"))
			{
				double bestMatch = 0;
				ArrayList<String> matches = new ArrayList<String>();
				for (String otherKey : other.otherFilesContents.keySet())
				{
					if (!otherKey.endsWith("submission.txt"))
					{
						double thisMatch = StringSimilarity.similarity
								(otherFilesContents.get(thisKey), other.otherFilesContents.get(otherKey));
						if (thisMatch > bestMatch)
						{
							// Strictly better: previous ties are discarded.
							matches.clear();
							matches.add(thisKey + ":" + other.name + ":" + otherKey);
							bestMatch = thisMatch;
						}
						// NOTE(review): exact double equality — zero-score files
						// also accumulate here until the threshold filter below.
						else if (thisMatch == bestMatch)
						{
							matches.add(thisKey + ":" + other.name + ":" + otherKey);
						}
					}
				}
				if (bestMatch >= threshold)
				{
					for (String s : matches)
					{
						similarFiles.put(s, bestMatch);
					}
				}
			}
		}
		return similarFiles;
	}
/**
* Find the closest submitted files to this assignment's submission.
*/
public void findClosestMatchesInAllSubmissions(ArrayList<AssignmentResults> assignments)
{
double closestMatch = SUBMISSION_SIMILARITY_THRESHOLD;
for (AssignmentResults other : assignments)
{
// Don't check an assignment's own submitted files.
if (this != other)
{
TreeMap<String, Double> closestMatches = findClosestMatches(other, closestMatch);
if (closestMatches.size() > 0 &&
closestMatches.firstEntry().getValue() > closestMatch)
{
// Discard lower-scoring files.
closestOtherSubmittedFiles.clear();
closestMatch = closestMatches.firstEntry().getValue();
}
for (String key : closestMatches.keySet())
{
closestOtherSubmittedFiles.put(key, closestMatches.get(key));
}
}
}
}
/**
* In the user directory, collect the Java files in the submissions (ordered by
* date) in the user's directory for compilation and execution.
*
*/
public void copyJavaFilesToUser() throws IOException
{
Collections.sort(javaFiles);
for (String jFile : javaFiles)
{
/* Copy to user directory. */
File srcFile = new File(dir.getAbsolutePath() + File.separator + jFile);
File destFile = new File(dir.getAbsolutePath() + File.separator + srcFile.getName());
CopyFile.copy(srcFile, destFile);
boolean found = false;
for (int i = 0; i < userJavaFiles.size() && !found; i++)
{
if (userJavaFiles.get(i).equals(destFile.getName()))
{
found = true;
}
}
if (!found)
{
userJavaFiles.add(destFile.getName());
}
}
}
/**
* Find earliest date of submission in the files.
* If submission date is after the due date, compute the
* number of days late.
*/
public void findSubmissionDate(Date dueDate) throws IOException
{
/* Date Submitted:Friday, October 21, 2011 4:22:36 PM CDT */
/* Date Submitted: Friday, February 28, 2014 12:46:42 AM CST */
Pattern dsPattern = Pattern.compile("^Date Submitted:\\s*\\S+, (\\S+ \\d+, \\d+ \\d+:\\d+:\\d+ \\S+ \\S+)$");
DateFormat df = DateFormat.getDateTimeInstance(DateFormat.LONG, DateFormat.LONG, Locale.US);
@SuppressWarnings("unchecked")
ArrayList<String> otherFilesCopy = (ArrayList<String>)otherFiles.clone();
Collections.sort(otherFilesCopy);
for (String oFn : otherFilesCopy)
{
/* Build absolute pathname. */
File oFile = new File(dir.getAbsolutePath() + File.separator + oFn);
if (oFile.getName().equalsIgnoreCase("submission.txt"))
{
/* Open and read file until Date Submitted line is found. */
Scanner in = new Scanner(oFile);
boolean found = false;
while (in.hasNextLine() && !found)
{
String line = in.nextLine();
Matcher m = dsPattern.matcher(line);
if (m.matches())
{
found = true;
String dateString = m.group(1);
try
{
Date parsedDate = df.parse(dateString);
if (firstSubmissionDate == null || parsedDate.compareTo(firstSubmissionDate) < 0)
{
firstSubmissionDate = parsedDate;
}
}
catch (ParseException pe)
{
pe.printStackTrace();
}
}
}
in.close();
}
}
if (firstSubmissionDate == null)
{
try
{
firstSubmissionDate = df.parse("January 1, 2000 00:00:00 AM CST");
}
catch (ParseException pe)
{
pe.printStackTrace();
}
}
else
{
if (firstSubmissionDate.after(dueDate))
{
daysLate = (firstSubmissionDate.getTime() - dueDate.getTime()) /
(double)(24 * 60 * 60 * 1000);
}
}
}
/**
* Strip the "package ...;" statement, if any, from Java files in the user's
* directory.
*/
public void stripPackageFromJavaFiles() throws IOException
{
for (String jFile : userJavaFiles)
{
File srcFile = new File(dir.getAbsolutePath() + File.separator + jFile);
File destFile = new File(dir.getAbsolutePath() + File.separator + jFile + ".new");
Scanner in = new Scanner(srcFile);
PrintWriter out = new PrintWriter(destFile);
boolean replace = false;
while (in.hasNextLine())
{
String s = in.nextLine();
if (!s.startsWith("package "))
{
out.println(s);
}
else
{
replace = true;
}
}
in.close();
out.close();
if (replace)
{
long modTime = srcFile.lastModified();
if (modTime != 0L)
{
destFile.setLastModified(modTime);
}
destFile.renameTo(srcFile);
}
else
{
destFile.delete();
}
}
}
/**
* Organize BlackBoard download.
* For each file in the directory, if it is in the form
* Homework201_njvang_attempt_2011-09-02-20-09-43_BankAccount.java
* then move the file into a directory <user>/<date>/<file>.
*
*/
public static void organizeBlackBoardFiles(File dir) throws IOException
{
if (!dir.isDirectory()) {
throw new IllegalArgumentException(dir.getName() + " is not a directory");
}
/* First step: organize files into per-user / per-submission directories. */
File[] allFiles = dir.listFiles();
Pattern hwFilePattern = Pattern.compile("[A-Z][ a-zA-Z0-9]*_(.*)_attempt_([0-9-]*)[_.](.*)$");
for (File d : allFiles)
{
Matcher m = hwFilePattern.matcher(d.getName());
if (m.matches())
{
String user = m.group(1);
String date = m.group(2);
String fn = m.group(3);
if (fn.equals("txt"))
{
fn = "submission.txt";
}
File userDir = new File(dir.getAbsolutePath() + File.separator + user);
if (!userDir.isDirectory())
{
userDir.mkdir();
}
File submissionDir = new File(dir.getAbsolutePath() + File.separator + user + File.separator + date);
if (!submissionDir.isDirectory())
{
submissionDir.mkdir();
}
File destFile = new File(submissionDir.getAbsolutePath() + File.separator + fn);
d.renameTo(destFile);
}
}
}
/**
* Find the files in the directory.
* Build a list of the files.
*
* @param dir - Directory to examine
* @return true if successful
* @throws IOException
*/
public void findFiles(File dir) throws IOException
{
ArrayList<String> jfs = new ArrayList<String>();
ArrayList<String> ofs = new ArrayList<String>();
if (!dir.isDirectory()) {
throw new IllegalArgumentException(dir.getName() + " is not a directory");
}
ArrayList<String> allFiles = new ArrayList<String>();
findFiles(dir, "", allFiles);
for (String e : allFiles)
{
File f = new File (e);
if (f.getName().startsWith("."))
{
/* Ignore names starting with '.'. */
continue;
}
if (e.endsWith(".classpath"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".DS_Store"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".prefs"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".class"))
{
/* Ignore. */
continue;
}
if (e.endsWith(".jar"))
{
/* Ignore. */
continue;
}
else if (e.endsWith(".java"))
{
jfs.add(e);
}
else
{
ofs.add(e);
}
}
javaFiles = jfs;
otherFiles = ofs;
}
	/**
	 * Recursively collect all file names under dir, prefixing each with
	 * its relative path from the top-level directory.  Zip archives are
	 * expanded in place (via Unzip) and their contents added.  Java files
	 * sitting directly in the student's root directory are skipped: they
	 * belong inside dated submission subdirectories.
	 *
	 * @param dir - Directory to examine
	 * @param subdirName - relative pathname for this directory ("" at the top)
	 * @param foundFiles - output list of relative pathnames found
	 * @throws IOException if a zip archive cannot be read
	 */
	public void findFiles(File dir, String subdirName, ArrayList<String> foundFiles) throws IOException
	{
		if (!dir.isDirectory()) {
			throw new IllegalArgumentException(subdirName + dir.getName() + " is not a directory");
		}
		for (File e : dir.listFiles())
		{
			if (e.getName().startsWith("."))
			{
				/* Ignore names starting with '.'. */
				continue;
			}
			if (e.isDirectory())
			{
				/*
				 * Enter the subdirectory and find its files.
				 */
				findFiles(e, subdirName + e.getName() + File.separator, foundFiles);
			}
			else if (e.isFile())
			{
				if (e.getName().endsWith(".zip"))
				{
					/*
					 * Extract and add files to list.  A corrupt archive is
					 * reported but does not abort the scan.
					 */
					try
					{
						ArrayList<String> unzippedFiles = Unzip.unzip(e);
						for (String s : unzippedFiles)
						{
							foundFiles.add(subdirName + s);
						}
					}
					catch (ZipException ze)
					{
						ze.printStackTrace();
					}
				}
				else
				{
					if (subdirName.length() == 0 && e.getName().endsWith(".java"))
					{
						// Root-level Java files are deliberately excluded.
						System.err.println("Skipping Java file " + subdirName + e.getName() + " in student " + name + " root directory");
					}
					else
					{
						foundFiles.add(subdirName + e.getName());
					}
				}
			}
			else
			{
				// Neither file nor directory (e.g. broken symlink) — report only.
				System.err.println(subdirName + e.getName() + " is not a directory or a file");
			}
		}
	}
/**
* Check the list of java files found in the student's directory.
* Set any missing java files in the missingJavaFiles list.
*
* @param foundFiles - List of Java files found
* @param missingFiles - List of required files that were not found
* @return true if all required files were found, or false otherwise.
*/
public boolean checkRequiredJavaFiles(ProgramInfo[] programs)
{
boolean result = true;
for (ProgramInfo pi : programs)
{
for (AssignmentClasses clss : pi.getClasses())
{
String className = clss.getClassName() + ".java";
boolean fileFound = false;
for (int i = 0; i < userJavaFiles.size() && !fileFound; i++)
{
String foundFile = userJavaFiles.get(i);
int lastSlash = foundFile.lastIndexOf(File.separator);
if (lastSlash != -1)
{
foundFile = foundFile.substring(lastSlash + 1);
}
if (foundFile.equals(className))
{
fileFound = true;
}
}
if (!fileFound)
{
missingFiles.add(className);
result = false;
}
}
}
return result;
}
/**
 * Capture the contents of any Java source files flagged for inclusion in
 * the report.  For each class whose showClass() flag is set, the file is
 * located among the student's submitted files and its text (or an error
 * message if it is missing or unreadable) is stored in the
 * requestedUserJavaFilesContents map, keyed by the class file name.
 *
 * @param programs - array of programs classes
 */
public void showRequestedJavaFiles(ProgramInfo[] programs)
{
    for (ProgramInfo pi : programs)
    {
        for (AssignmentClasses clss : pi.getClasses())
        {
            if (clss.showClass())
            {
                /* StringBuilder is preferred over StringBuffer for
                   method-local, single-threaded use. */
                StringBuilder r = new StringBuilder();
                String className = clss.getClassName() + ".java";
                String foundFilename = null;
                for (int i = 0; i < userJavaFiles.size() && foundFilename == null; i++)
                {
                    String foundFile = userJavaFiles.get(i);
                    /* Strip any subdirectory prefix before comparing. */
                    int lastSlash = foundFile.lastIndexOf(File.separator);
                    if (lastSlash != -1)
                    {
                        foundFile = foundFile.substring(lastSlash + 1);
                    }
                    if (foundFile.equals(className))
                    {
                        foundFilename = dir.getAbsolutePath() + File.separator + foundFile;
                    }
                }
                if (foundFilename == null)
                {
                    r.append("---- Java file " + className + " NOT FOUND! ----\n");
                }
                else
                {
                    r.append("---- Java file " + className + " ----\n");
                    try
                    {
                        r.append(getFileAsString(new File(foundFilename)));
                    }
                    catch (IOException e)
                    {
                        /* Record the failure (message plus stack trace) in
                           the report rather than aborting the run. */
                        r.append("Exception " + e.getMessage() + " while reading file " + foundFilename + "\n");
                        StackTraceElement[] sts = e.getStackTrace();
                        for (StackTraceElement st : sts)
                        {
                            r.append('\t');
                            r.append(st.toString());
                            r.append('\n');
                        }
                    }
                    r.append("\n---- End of Java file " + foundFilename + " ----\n");
                }
                requestedUserJavaFilesContents.put(className, r.toString());
            }
        }
    }
}
/**
 * Compile the student's Java source files by invoking the javac command
 * in the given working directory.  Any compiler diagnostics are captured
 * in compilationOutput; on success compilationOutput is set to "OK".
 *
 * @param dir - working directory in which to run the compiler
 * @return true if compilation produced no output and a zero exit code,
 *         or false otherwise.
 * @throws IOException if the compiler process cannot be started
 * @throws InterruptedException if interrupted while waiting for javac
 */
public boolean compileJavaFiles(File dir) throws IOException, InterruptedException
{
    if (userJavaFiles.size() == 0)
    {
        /* Nothing to compile. */
        return true;
    }
    String[] cmd = new String[userJavaFiles.size() + 1];
    cmd[0] = "javac";
    for (int i = 1; i <= userJavaFiles.size(); i++)
    {
        cmd[i] = userJavaFiles.get(i - 1);
    }
    /* ProcessBuilder with merged output streams captures all javac
       diagnostics and avoids the potential deadlock of reading only
       stderr while the child fills its stdout pipe buffer. */
    ProcessBuilder pb = new ProcessBuilder(cmd);
    pb.directory(dir);
    pb.redirectErrorStream(true);
    Process result = pb.start();
    StringBuffer output = new StringBuffer();
    Scanner in = new Scanner(result.getInputStream());
    while (in.hasNextLine())
    {
        output.append(in.nextLine() + "\n");
    }
    in.close();
    result.waitFor();
    if (output.length() != 0)
    {
        compilationOutput = output.toString();
        return false;
    }
    else if (result.exitValue() != 0)
    {
        compilationOutput = "Compiler exit code: " + result.exitValue();
        return false;
    }
    compilationOutput = "OK";
    return true;
}
/**
 * Run each of the programs that should have been submitted, once per run
 * configuration.  The standard output and error of every run (or a
 * description of any failure) is stored in the programOutputs map, keyed
 * by "program.runConfigurationName".  Each child process is forcibly
 * destroyed if it has not finished within 15 seconds.
 *
 * @param programs - Programs and run configurations
 * @param dir - working directory in which to run the programs
 * @throws IOException
 * @throws InterruptedException
 */
public void runJavaPrograms(ProgramInfo[] programs, File dir) throws IOException, InterruptedException
{
    Runtime r = Runtime.getRuntime();
    for (ProgramInfo pi : programs)
    {
        String program = pi.getName();
        String classpath = pi.getClasspath();
        URI securityPolicyURI = null;
        String securityPolicyFileStr = pi.getSecurityPolicyFile();
        if (securityPolicyFileStr != null)
        {
            /* A security policy file is passed to the child JVM as a URI. */
            File securityPolicyFile = new File(securityPolicyFileStr);
            securityPolicyURI = securityPolicyFile.toURI();
        }
        for (RunConfiguration rc : pi.getRunConfigurations())
        {
            StringBuffer output = new StringBuffer();
            String[] args = rc.getArguments();
            String[] properties = rc.getProperties();
            /* Compute the maximum length of the java command line:
               "java" + main class + program arguments, plus optional
               classpath, security-policy, and -D property arguments. */
            int maxArgs = args.length + 2;
            if (classpath != null)
            {
                maxArgs += 2;
            }
            if (securityPolicyURI != null)
            {
                maxArgs += 3;
            }
            if (properties != null)
            {
                maxArgs += properties.length;
            }
            int numArgs = 0;
            String[] cmd = new String[maxArgs];
            cmd[numArgs++] = "java";
            if (classpath != null)
            {
                cmd[numArgs++] = "-classpath";
                cmd[numArgs++] = classpath;
            }
            if (securityPolicyURI != null)
            {
                // Set parameter grading.base to the directory containing the directory
                // that contains the homework submissions.
                cmd[numArgs++] = "-Dgrading.base=" + dir.getParentFile().getParent();
                // System.out.println("grading.base=" + cmd[numArgs - 1]);
                cmd[numArgs++] = "-Djava.security.manager";
                // The double '=' makes the given policy file the ONLY policy
                // in effect, replacing (not augmenting) the default policy.
                cmd[numArgs++] = "-Djava.security.policy==" + securityPolicyURI.toString();
            }
            if (properties != null)
            {
                // Add any properties specified in the RunConfiguration
                // to the command line.
                for (String p : properties)
                {
                    cmd[numArgs++] = String.format("-D%s", p);
                }
            }
            cmd[numArgs++] = program;
            System.arraycopy(args, 0, cmd, numArgs, args.length);
            numArgs += args.length;
            System.out.println("Running " + name + " program " + program);
            try
            {
                /* Setup input for the program, if any. */
                BufferedInputStream programInput = null;
                FileInputStream inputStream = rc.openInputFile(dir.getAbsolutePath());
                if (inputStream != null)
                {
                    programInput = new BufferedInputStream(inputStream);
                }
                StreamConnector stdinConnector = null;
                /*
                 * Setup output for the program, either to an output file or
                 * memory buffer.
                 */
                OutputStream outputStream = rc.openOutputFile(dir.getAbsolutePath());
                ByteArrayOutputStream storedOutputStream = null;
                if (outputStream == null)
                {
                    /* Collect output in a ByteArrayOutputStream. */
                    storedOutputStream = new ByteArrayOutputStream(MAX_STD_OUTPUT_SIZE);
                    outputStream = storedOutputStream;
                }
                BufferedOutputStream programOutput = new BufferedOutputStream(outputStream);
                StreamConnector stdoutConnector = null;
                /* Setup error output for the program. */
                ByteArrayOutputStream storedErrorStream = new ByteArrayOutputStream(MAX_ERROR_OUTPUT_SIZE);
                BufferedOutputStream programError = new BufferedOutputStream(storedErrorStream);
                StreamConnector stderrConnector = null;
                try
                {
                    final Process process = r.exec(cmd, null, dir);
                    /*
                     * Connect and start the threads to copy stdin, stdout,
                     * and stderr for the child process.
                     */
                    if (programInput != null)
                    {
                        stdinConnector = new StreamConnector(programInput, process.getOutputStream(), false, true, "StdIn");
                        stdinConnector.start();
                    }
                    stdoutConnector = new StreamConnector(process.getInputStream(), programOutput, "StdOut");
                    stdoutConnector.start();
                    stderrConnector = new StreamConnector(process.getErrorStream(), programError, "StdErr");
                    stderrConnector.start();
                    /*
                     * Setup a timer to expire and terminate the child
                     * process after a reasonable delay.
                     */
                    Timer t = new Timer();
                    t.schedule(new TimerTask() {
                        @Override
                        public void run() {
                            process.destroy();
                        }
                    }, 15000); // it will kill the process after 15 seconds (if it's not finished yet).
                    process.waitFor();
                    t.cancel();
                    /* Wait for the copier threads to drain the pipes before
                       reading the captured output; stdinConnector is not
                       joined here. NOTE(review): confirm it terminates on
                       its own when the child's stdin pipe closes. */
                    stdoutConnector.join();
                    stderrConnector.join();
                    /* Read stdout and stderr into output arraylist. */
                    if (storedOutputStream != null)
                    {
                        if (storedOutputStream.size() == 0)
                        {
                            output.append(String.format("----- No output from %s java %s run %s ------\n", name, program, rc.getName()));
                        }
                        else
                        {
                            output.append(String.format("----- Start of output from %s java %s run %s ------\n", name, program, rc.getName()));
                            ByteArrayInputStream conv = new ByteArrayInputStream(storedOutputStream.toByteArray());
                            Scanner in = new Scanner(conv);
                            while (in.hasNextLine())
                            {
                                output.append(in.nextLine());
                                output.append('\n');
                            }
                            in.close();
                            output.append(String.format("----- End of output from %s java %s run %s ------\n", name, program, rc.getName()));
                        }
                    }
                    if (storedErrorStream != null)
                    {
                        if (storedErrorStream.size() != 0)
                        {
                            output.append(String.format("----- Start of error output from %s java %s run %s ------\n", name, program, rc.getName()));
                            ByteArrayInputStream conv = new ByteArrayInputStream(storedErrorStream.toByteArray());
                            Scanner errIn = new Scanner(conv);
                            while (errIn.hasNextLine())
                            {
                                output.append(errIn.nextLine());
                                output.append('\n');
                            }
                            errIn.close();
                            output.append(String.format("----- End of error output from %s java %s run %s ------\n", name, program, rc.getName()));
                        }
                    }
                    if (process.exitValue() != 0)
                    {
                        output.append(String.format("*** %s java %s run %s exit code %d\n", name, program, rc.getName(), process.exitValue()));
                    }
                    programOutputs.put(program + '.' + rc.getName(), output.toString());
                }
                finally
                {
                    if (programInput != null)
                    {
                        programInput.close();
                    }
                    if (programOutput != null)
                    {
                        programOutput.close();
                    }
                }
            }
            catch (IOException e)
            {
                /* Record the failure as the run's output instead of aborting. */
                programOutputs.put(program + '.' + rc.getName(), "IOException: " + e.getMessage());
            }
            catch (InterruptedException e)
            {
                /* NOTE(review): the interrupt status is not re-asserted here
                   (Thread.currentThread().interrupt()) — confirm callers do
                   not rely on it. */
                programOutputs.put(program + '.' + rc.getName(), "InterruptedException: " + e.getMessage());
            }
        }
    }
}
/**
 * Hash code based on the student name, consistent with equality.
 */
@Override
public int hashCode()
{
    return name.hashCode();
}
/**
 * Equality is based on the student name.  This overrides
 * Object.equals(Object) so that instances behave correctly in hash-based
 * collections; the previous equals(AssignmentResults) signature merely
 * overloaded, rather than overrode, Object.equals, so collection lookups
 * silently used identity comparison.  Existing calls passing an
 * AssignmentResults still resolve here with the same result.
 *
 * @param obj - object to compare against
 * @return true if obj is an AssignmentResults with the same name.
 */
@Override
public boolean equals(Object obj)
{
    if (this == obj)
    {
        return true;
    }
    if (!(obj instanceof AssignmentResults))
    {
        return false;
    }
    return name.equals(((AssignmentResults) obj).name);
}
}
| Exclude database.properties files from comparisons with other
submissions. | src/AssignmentResults.java | Exclude database.properties files from comparisons with other submissions. |
|
Java | bsd-3-clause | 18ad5c20f373a92350c73cc14ea02e40b34bd2a1 | 0 | Serebriakov/micromod,tectronics/micromod,Serebriakov/micromod,tectronics/micromod,Serebriakov/micromod,tectronics/micromod,stark9000/micromod,Serebriakov/micromod,stark9000/micromod,stark9000/micromod,stark9000/micromod,tectronics/micromod |
package micromod.compiler;
/* Compiles textual MT files to Protracker MOD files. */
public class Compiler {
	/* Help text describing the 8-character ProTracker note format and the
	   supported effect commands, printed by the "-notes" option. */
	private static final String NOTE_INFO =
		"\nAn 8-character ProTracker note takes the form 'KKOIIFPP', where:\n\n" +
		" KK = Key (One of 'C-', 'C#', 'D-', 'D#', 'E-', 'F-', 'F#',\n" +
		" 'G-', 'G#', 'A-', 'A#', 'B-', or '--').\n" +
		" O = Octave (0 to 6, or '-').\n" +
		" II = Instrument or macro index (Decimal, 01 to 99, or '--').\n" +
		" F = Effect Command (Hexadecimal, 0 to F, or '-').\n" +
		" PP = Effect Parameter (Hexadecimal, 00 to FF, or '--').\n\n" +
		"For example, an empty note would be '--------', and instrument 1\n" +
		"played at middle-c, with no effect would be 'C-201---'.\n" +
		"The channel volume and fine-tune are set from the instrument.\n\n" +
		"When a macro is played instead of an instrument, it will play until\n" +
		"a note with an instrument is encountered. The transpose and volume\n" +
		"of a macro can be adjusted by using a key or set volume (effect C)\n" +
		"in the pattern. Macros can play over multiple patterns or loop within\n" +
		"the same pattern using double patterns, for example 'Pattern 0,1'.\n\n" +
		"The available effect/parameter combinations are:\n\n" +
		" 0xy = Arpeggio, rapidly cycle through key, key + x, key + y.\n" +
		" 1xx = Portamento up with speed xx.\n" +
		" 1Fx = Portamento up with speed x semitones per row (Macro only).\n" +
		" 2xx = Portamento down with speed xx.\n" +
		" 2Fx = Portamento down with speed x semitones per row (Macro only).\n" +
		" 3xx = Tone portamento, slide towards the current key at speed xx.\n" +
		" 3Fx = Tone portamento with speed x semitones per row (Macro only).\n" +
		" 4xy = Vibrato with speed x, depth y.\n" +
		" 5xy = Continue tone portamento with volume-slide speed of (x - y).\n" +
		" 6xy = Continue vibrato with volume slide speed of (x - y).\n" +
		" 7xx = Tremolo with speed x, depth y.\n" +
		" 8xx = Set panning 00 to 7F. Not for 4-channel modules.\n" +
		" 9xx = Set sample offset to xx * 256 samples.\n" +
		" 9Fx = Increase sample offset by x * length / 64 (Macro only).\n" +
		" Axy = Volume slide with speed of (x - y).\n" +
		" AxF = Linear volume-slide up with speed x (Macro only).\n" +
		" AFx = Linear volume-slide down with speed x (Macro only).\n" +
		" Bxx = Pattern jump to pattern xx.\n" +
		" Cxx = Set volume of instrument or macro to xx (00 to 40).\n" +
		" Dnn = Pattern break, to row nn (decimal 00 to 63) of next pattern.\n" +
		" E1x = Fine portamento up with speed x.\n" +
		" E2x = Fine portamento down with speed x.\n" +
		" E3x = Glissando. (Not supported in Micromod).\n" +
		" E4x = Set vibrato waveform x (Sine 0, Saw 1, Square 2, Random 3).\n" +
		" E5x = Set channel fine-tune x (1 to 7 up, F to 8 down).\n" +
		" E60 = Set pattern loop marker.\n" +
		" E6x = Pattern loop (replay from the marker x times).\n" +
		" E7x = Set tremolo waveform x (Sine 0, Saw 1, Square 2, Random 3).\n" +
		" E9x = Retrigger sample every x ticks.\n" +
		" EAx = Fine volume-slide up with speed x.\n" +
		" EBx = Fine volume-slide down with speed x.\n" +
		" ECx = Note cut. Set volume to zero after x ticks.\n" +
		" EDx = Note delay. Wait x ticks before triggering key.\n" +
		" EEx = Pattern delay. Add x ticks to current row.\n" +
		" EFx = Invert loop. (Not supported in Micromod).\n" +
		" Fxx = Set speed 00 to 1F (ticks per row, default 6).\n" +
		" Fxx = Set tempo 20 to FF (tick length 2.5/xx seconds, default 7D).";

	/* Command-line entry point.  Parses the arguments and dispatches to
	   playback, compilation, decompilation or pattern-to-sample conversion,
	   or prints usage information if no valid combination is given. */
	public static void main( String[] args ) throws java.io.IOException {
		String mtFile = null, modFile = null, outDir = null, wavFile = null;
		boolean printNotes = false, printSyntax = false, interpolation = false;
		int[] sequence = null;
		int argsIdx = 0, key = 0;
		while( argsIdx < args.length ) {
			String arg = args[ argsIdx++ ];
			if( "-notes".equals( arg ) ) {
				printNotes = true;
			} else if( "-syntax".equals( arg ) ) {
				printSyntax = true;
			} else if( "-dir".equals( arg ) ) {
				outDir = args[ argsIdx++ ];
			} else if( "-hq".equals( arg ) ) {
				interpolation = true;
			} else if( "-key".equals( arg ) ) {
				key = micromod.Note.parseKey( args[ argsIdx++ ] );
			} else if( "-mod".equals( arg ) || "-out".equals( arg ) ) {
				modFile = args[ argsIdx++ ];
			} else if( "-wav".equals( arg ) ) {
				wavFile = args[ argsIdx++ ];
			} else if( "-seq".equals( arg ) || "-pat".equals( arg ) ) {
				sequence = Parser.parseIntegerArray( args[ argsIdx++ ] );
			} else {
				/* Any argument without a recognised switch is the input MT file. */
				mtFile = arg;
			}
		}
		if( printNotes ) {
			System.out.println( NOTE_INFO );
		} else if( printSyntax ) {
			System.out.println( Parser.syntax( new Module( null ) ) );
		} else if( mtFile != null ) {
			if( modFile != null ) {
				System.out.println( "Compiling '" + mtFile + "' to module '" + modFile + "'." );
				convert( new java.io.File( mtFile ), new java.io.File( modFile ) );
			} else {
				System.out.println( "Compiling and playing '" + mtFile + "'." );
				play( new java.io.File( mtFile ), sequence, interpolation );
			}
		} else if( modFile != null && ( outDir != null || wavFile != null ) ) {
			if( outDir != null ) {
				System.out.println( "Extracting module '" + modFile + "' to directory '" + outDir + "'." );
				decompile( new micromod.Module( new java.io.FileInputStream( modFile ) ), new java.io.File( outDir ) );
			} else {
				System.out.println( "Converting module '" + modFile + "' to sample '" + wavFile + "'." );
				/* Default to pattern 0 and key C-2 if not specified. */
				if( sequence == null || sequence.length < 1 ) {
					sequence = new int[ 1 ];
				}
				if( key < 1 ) {
					key = micromod.Note.parseKey( "C-2" );
				}
				patternToSample( new java.io.File( modFile ), new java.io.File( wavFile ), sequence[ 0 ], key, interpolation );
			}
		} else {
			/* No valid argument combination: print usage. */
			System.err.println( "Micromod Compiler! (c)2014 [email protected]" );
			System.err.println( " Play: input.mt [-hq] [-seq 1,2,3]" );
			System.err.println( " Compile: input.mt [-out output.mod]" );
			System.err.println( " Decompile: -mod input.mod -dir outputdir" );
			System.err.println( " Mod To Sample: -mod input.mod -wav output.wav [-pat 0] [-key C-2] [-hq]" );
			System.err.println( " Print MT Syntax: -syntax" );
			System.err.println( " Print Note Info: -notes" );
		}
	}

	/* Compile the MT file and play the result, optionally overriding the
	   module's pattern sequence.  Blocks until playback completes. */
	private static void play( java.io.File mtFile, int[] sequence, boolean interpolation ) throws java.io.IOException {
		Module module = compile( mtFile );
		if( sequence != null ) {
			/* Replace the module's sequence with the one from the command line. */
			module.setSequenceLength( sequence.length );
			for( int idx = 0; idx < sequence.length; idx++ ) {
				module.setSequenceEntry( idx, sequence[ idx ] );
			}
		}
		micromod.Player player = new micromod.Player( module.getModule(), interpolation, false );
		System.out.println( player.getModuleInfo() );
		Thread thread = new Thread( player );
		thread.start();
		try {
			thread.join();
		} catch( InterruptedException e ) {
			System.err.println( "Interrupted!" );
		}
	}

	/* Compile the MT file and write the resulting module to modFile.
	   Refuses to overwrite an existing output file. */
	private static void convert( java.io.File mtFile, java.io.File modFile ) throws java.io.IOException {
		if( modFile.exists() ) {
			throw new IllegalArgumentException( "Output file already exists!" );
		}
		java.io.OutputStream outputStream = new java.io.FileOutputStream( modFile );
		try {
			outputStream.write( compile( mtFile ).getModule().save() );
		} finally {
			outputStream.close();
		}
	}

	/* Parse the MT file into a Module.  The file's parent directory is used
	   to resolve relative paths (such as WaveFile entries). */
	private static Module compile( java.io.File mtFile ) throws java.io.IOException {
		Module module = new Module( mtFile.getParentFile() );
		Parser.parse( new java.io.InputStreamReader( new java.io.FileInputStream( mtFile ) ), module );
		return module;
	}

	/* Write the module out as an MT text file ("module.mt") plus one WAV
	   file per instrument, into a newly-created output directory. */
	private static void decompile( micromod.Module module, java.io.File outDir ) throws java.io.IOException {
		if( outDir.exists() ) {
			throw new IllegalArgumentException( "Output directory already exists!" );
		}
		outDir.mkdir();
		java.io.Writer writer = new java.io.OutputStreamWriter( new java.io.FileOutputStream( new java.io.File( outDir, "module.mt" ) ) );
		try {
			writer.write( "Module \"" + nameString( module.getSongName() ) + "\"\n" );
			int numChannels = module.getNumChannels();
			writer.write( "Channels " + numChannels + "\n" );
			writer.write( "Sequence \"" + module.getSequenceEntry( 0 ) );
			/* Track the highest pattern index referenced by the sequence. */
			int numPatterns = 1;
			for( int idx = 1, len = module.getSequenceLength(); idx < len; idx++ ) {
				int seqEntry = module.getSequenceEntry( idx );
				if( seqEntry >= numPatterns ) {
					numPatterns = seqEntry + 1;
				}
				writer.write( "," + seqEntry );
			}
			writer.write( "\"\n" );
			for( int instIdx = 1; instIdx < 32; instIdx++ ) {
				micromod.Instrument instrument = module.getInstrument( instIdx );
				String name = nameString( instrument.getName() );
				int volume = instrument.getVolume();
				int fineTune = instrument.getFineTune();
				int sampleLength = instrument.getLoopStart() + instrument.getLoopLength();
				/* Only emit instruments that carry some information. */
				if( name.length() > 0 || volume > 0 || fineTune > 0 || sampleLength > 2 ) {
					writer.write( "\tInstrument " + instIdx + " Name \"" + name + "\"\n" );
					if( volume > 0 || fineTune > 0 ) {
						writer.write( "\t\tVolume " + instrument.getVolume() + " FineTune " + instrument.getFineTune() + "\n" );
					}
					if( sampleLength > 2 ) {
						/* Two-digit, zero-padded WAV file name per instrument. */
						String fileName = ( instIdx < 10 ? "0" : "" ) + instIdx + ".wav";
						writer.write( "\t\tWaveFile \"" + fileName + "\"\n" );
						AudioData audioData = new AudioData( instrument.getSampleData(), module.getC2Rate() );
						java.io.OutputStream outputStream = new java.io.FileOutputStream( new java.io.File( outDir, fileName ) );
						try {
							audioData.writeWav( outputStream, true );
						} finally {
							outputStream.close();
						}
						if( instrument.getLoopLength() > 2 ) {
							writer.write( "\t\tLoopStart " + instrument.getLoopStart() + " LoopLength " + instrument.getLoopLength() + "\n" );
						}
					}
				}
			}
			micromod.Note note = new micromod.Note();
			for( int patIdx = 0; patIdx < numPatterns; patIdx++ ) {
				micromod.Pattern pattern = module.getPattern( patIdx );
				writer.write( "\tPattern " + patIdx + "\n" );
				for( int rowIdx = 0; rowIdx < 64; rowIdx++ ) {
					writer.write( "\t\tRow \"" + ( rowIdx > 9 ? "" : "0" ) + rowIdx );
					for( int chanIdx = 0; chanIdx < numChannels; chanIdx++ ) {
						pattern.getNote( rowIdx, chanIdx, note );
						writer.write( " " + note.toString() );
					}
					writer.write( "\"\n" );
				}
			}
			writer.write( "(End.)\n" );
		} finally {
			writer.close();
		}
	}

	/* Render a single pattern of the module to a mono 16-bit WAV file,
	   pitched so that playing the sample at the given key reproduces the
	   original tempo. */
	private static void patternToSample( java.io.File modFile, java.io.File wavFile, int pattern, int key, boolean interpolation ) throws java.io.IOException {
		micromod.Module module = new micromod.Module( new java.io.FileInputStream( modFile ) );
		/* Restrict the sequence to just the requested pattern. */
		module.setSequenceLength( 1 );
		module.setSequenceEntry( 0, pattern );
		int samplingRate = module.getC2Rate() * 428 / micromod.Note.keyToPeriod( key, 0 );
		micromod.Micromod replay = new micromod.Micromod( module, samplingRate );
		replay.setInterpolation( interpolation );
		int outLen = replay.calculateSongDuration();
		int[] mixBuf = new int[ replay.getMixBufferLength() ];
		short[] outBuf = new short[ outLen ];
		int outIdx = 0;
		while( outIdx < outLen ) {
			int mixLen = replay.getAudio( mixBuf );
			for( int mixIdx = 0; mixIdx < mixLen; mixIdx++ ) {
				/* Average the stereo channels to mono and clamp to 16 bits. */
				int amp = ( mixBuf[ mixIdx * 2 ] + mixBuf[ mixIdx * 2 + 1 ] ) >> 1;
				if( amp > 32767 ) {
					amp = 32767;
				}
				if( amp < -32768 ) {
					amp = -32768;
				}
				outBuf[ outIdx++ ] = ( short ) amp;
			}
		}
		if( wavFile.exists() ) {
			throw new IllegalArgumentException( "Output file already exists!" );
		}
		java.io.OutputStream outputStream = new java.io.FileOutputStream( wavFile );
		try {
			new AudioData( outBuf, samplingRate ).writeWav( outputStream, false );
		} finally {
			outputStream.close();
		}
	}

	/* Sanitise a name for quoting in MT output: double quotes become
	   apostrophes, control and non-printable characters become spaces, and
	   trailing whitespace is trimmed. */
	private static String nameString( String str ) {
		int length = 0;
		char[] out = str.toCharArray();
		for( int idx = 0; idx < out.length; idx++ ) {
			char chr = out[ idx ];
			if( chr == '"' ) {
				chr = '\'';
			} else if( chr < 32 || ( chr > 126 && chr < 160 ) ) {
				chr = 32;
			}
			/* Remember the end of the last printable character. */
			if( chr > 32 ) {
				length = idx + 1;
			}
			out[ idx ] = chr;
		}
		return new String( out, 0, length );
	}
}
| micromod/compiler/Compiler.java |
package micromod.compiler;
/* Compiles textual MT files to Protracker MOD files. */
public class Compiler {
	/* Command-line entry point.  Parses the arguments and dispatches to
	   playback, compilation, decompilation or pattern-to-sample conversion,
	   or prints usage information if no valid combination is given. */
	public static void main( String[] args ) throws java.io.IOException {
		String mtFile = null, modFile = null, outDir = null, wavFile = null;
		boolean printSyntax = false, interpolation = false;
		int[] sequence = null;
		int argsIdx = 0, key = 0;
		while( argsIdx < args.length ) {
			String arg = args[ argsIdx++ ];
			if( "-syntax".equals( arg ) ) {
				printSyntax = true;
			} else if( "-dir".equals( arg ) ) {
				outDir = args[ argsIdx++ ];
			} else if( "-hq".equals( arg ) ) {
				interpolation = true;
			} else if( "-key".equals( arg ) ) {
				key = micromod.Note.parseKey( args[ argsIdx++ ] );
			} else if( "-mod".equals( arg ) || "-out".equals( arg ) ) {
				modFile = args[ argsIdx++ ];
			} else if( "-wav".equals( arg ) ) {
				wavFile = args[ argsIdx++ ];
			} else if( "-seq".equals( arg ) || "-pat".equals( arg ) ) {
				sequence = Parser.parseIntegerArray( args[ argsIdx++ ] );
			} else {
				/* Any argument without a recognised switch is the input MT file. */
				mtFile = arg;
			}
		}
		if( printSyntax ) {
			System.out.println( Parser.syntax( new Module( null ) ) );
		} else if( mtFile != null ) {
			if( modFile != null ) {
				System.out.println( "Compiling '" + mtFile + "' to module '" + modFile + "'." );
				convert( new java.io.File( mtFile ), new java.io.File( modFile ) );
			} else {
				System.out.println( "Compiling and playing '" + mtFile + "'." );
				play( new java.io.File( mtFile ), sequence, interpolation );
			}
		} else if( modFile != null && ( outDir != null || wavFile != null ) ) {
			if( outDir != null ) {
				System.out.println( "Extracting module '" + modFile + "' to directory '" + outDir + "'." );
				decompile( new micromod.Module( new java.io.FileInputStream( modFile ) ), new java.io.File( outDir ) );
			} else {
				System.out.println( "Converting module '" + modFile + "' to sample '" + wavFile + "'." );
				/* Default to pattern 0 and key C-2 if not specified. */
				if( sequence == null || sequence.length < 1 ) {
					sequence = new int[ 1 ];
				}
				if( key < 1 ) {
					key = micromod.Note.parseKey( "C-2" );
				}
				patternToSample( new java.io.File( modFile ), new java.io.File( wavFile ), sequence[ 0 ], key, interpolation );
			}
		} else {
			/* No valid argument combination: print usage. */
			System.err.println( "Micromod Compiler! (c)2014 [email protected]" );
			System.err.println( " Play: input.mt [-hq] [-seq 1,2,3]" );
			System.err.println( " Compile: input.mt [-out output.mod]" );
			System.err.println( " Decompile: -mod input.mod -dir outputdir" );
			System.err.println( " Mod To Sample: -mod input.mod -wav output.wav [-pat 0] [-key C-2] [-hq]" );
			System.err.println( " Print MT Syntax: -syntax" );
		}
	}

	/* Compile the MT file and play the result, optionally overriding the
	   module's pattern sequence.  Blocks until playback completes. */
	private static void play( java.io.File mtFile, int[] sequence, boolean interpolation ) throws java.io.IOException {
		Module module = compile( mtFile );
		if( sequence != null ) {
			/* Replace the module's sequence with the one from the command line. */
			module.setSequenceLength( sequence.length );
			for( int idx = 0; idx < sequence.length; idx++ ) {
				module.setSequenceEntry( idx, sequence[ idx ] );
			}
		}
		micromod.Player player = new micromod.Player( module.getModule(), interpolation, false );
		System.out.println( player.getModuleInfo() );
		Thread thread = new Thread( player );
		thread.start();
		try {
			thread.join();
		} catch( InterruptedException e ) {
			System.err.println( "Interrupted!" );
		}
	}

	/* Compile the MT file and write the resulting module to modFile.
	   Refuses to overwrite an existing output file. */
	private static void convert( java.io.File mtFile, java.io.File modFile ) throws java.io.IOException {
		if( modFile.exists() ) {
			throw new IllegalArgumentException( "Output file already exists!" );
		}
		java.io.OutputStream outputStream = new java.io.FileOutputStream( modFile );
		try {
			outputStream.write( compile( mtFile ).getModule().save() );
		} finally {
			outputStream.close();
		}
	}

	/* Parse the MT file into a Module.  The file's parent directory is used
	   to resolve relative paths (such as WaveFile entries). */
	private static Module compile( java.io.File mtFile ) throws java.io.IOException {
		Module module = new Module( mtFile.getParentFile() );
		Parser.parse( new java.io.InputStreamReader( new java.io.FileInputStream( mtFile ) ), module );
		return module;
	}

	/* Write the module out as an MT text file ("module.mt") plus one WAV
	   file per instrument, into a newly-created output directory. */
	private static void decompile( micromod.Module module, java.io.File outDir ) throws java.io.IOException {
		if( outDir.exists() ) {
			throw new IllegalArgumentException( "Output directory already exists!" );
		}
		outDir.mkdir();
		java.io.Writer writer = new java.io.OutputStreamWriter( new java.io.FileOutputStream( new java.io.File( outDir, "module.mt" ) ) );
		try {
			writer.write( "Module \"" + nameString( module.getSongName() ) + "\"\n" );
			int numChannels = module.getNumChannels();
			writer.write( "Channels " + numChannels + "\n" );
			writer.write( "Sequence \"" + module.getSequenceEntry( 0 ) );
			/* Track the highest pattern index referenced by the sequence. */
			int numPatterns = 1;
			for( int idx = 1, len = module.getSequenceLength(); idx < len; idx++ ) {
				int seqEntry = module.getSequenceEntry( idx );
				if( seqEntry >= numPatterns ) {
					numPatterns = seqEntry + 1;
				}
				writer.write( "," + seqEntry );
			}
			writer.write( "\"\n" );
			for( int instIdx = 1; instIdx < 32; instIdx++ ) {
				micromod.Instrument instrument = module.getInstrument( instIdx );
				String name = nameString( instrument.getName() );
				int volume = instrument.getVolume();
				int fineTune = instrument.getFineTune();
				int sampleLength = instrument.getLoopStart() + instrument.getLoopLength();
				/* Only emit instruments that carry some information. */
				if( name.length() > 0 || volume > 0 || fineTune > 0 || sampleLength > 2 ) {
					writer.write( "\tInstrument " + instIdx + " Name \"" + name + "\"\n" );
					if( volume > 0 || fineTune > 0 ) {
						writer.write( "\t\tVolume " + instrument.getVolume() + " FineTune " + instrument.getFineTune() + "\n" );
					}
					if( sampleLength > 2 ) {
						/* Two-digit, zero-padded WAV file name per instrument. */
						String fileName = ( instIdx < 10 ? "0" : "" ) + instIdx + ".wav";
						writer.write( "\t\tWaveFile \"" + fileName + "\"\n" );
						AudioData audioData = new AudioData( instrument.getSampleData(), module.getC2Rate() );
						java.io.OutputStream outputStream = new java.io.FileOutputStream( new java.io.File( outDir, fileName ) );
						try {
							audioData.writeWav( outputStream, true );
						} finally {
							outputStream.close();
						}
						if( instrument.getLoopLength() > 2 ) {
							writer.write( "\t\tLoopStart " + instrument.getLoopStart() + " LoopLength " + instrument.getLoopLength() + "\n" );
						}
					}
				}
			}
			micromod.Note note = new micromod.Note();
			for( int patIdx = 0; patIdx < numPatterns; patIdx++ ) {
				micromod.Pattern pattern = module.getPattern( patIdx );
				writer.write( "\tPattern " + patIdx + "\n" );
				for( int rowIdx = 0; rowIdx < 64; rowIdx++ ) {
					writer.write( "\t\tRow \"" + ( rowIdx > 9 ? "" : "0" ) + rowIdx );
					for( int chanIdx = 0; chanIdx < numChannels; chanIdx++ ) {
						pattern.getNote( rowIdx, chanIdx, note );
						writer.write( " " + note.toString() );
					}
					writer.write( "\"\n" );
				}
			}
			writer.write( "(End.)\n" );
		} finally {
			writer.close();
		}
	}

	/* Render a single pattern of the module to a mono 16-bit WAV file,
	   pitched so that playing the sample at the given key reproduces the
	   original tempo. */
	private static void patternToSample( java.io.File modFile, java.io.File wavFile, int pattern, int key, boolean interpolation ) throws java.io.IOException {
		micromod.Module module = new micromod.Module( new java.io.FileInputStream( modFile ) );
		/* Restrict the sequence to just the requested pattern. */
		module.setSequenceLength( 1 );
		module.setSequenceEntry( 0, pattern );
		int samplingRate = module.getC2Rate() * 428 / micromod.Note.keyToPeriod( key, 0 );
		micromod.Micromod replay = new micromod.Micromod( module, samplingRate );
		replay.setInterpolation( interpolation );
		int outLen = replay.calculateSongDuration();
		int[] mixBuf = new int[ replay.getMixBufferLength() ];
		short[] outBuf = new short[ outLen ];
		int outIdx = 0;
		while( outIdx < outLen ) {
			int mixLen = replay.getAudio( mixBuf );
			for( int mixIdx = 0; mixIdx < mixLen; mixIdx++ ) {
				/* Average the stereo channels to mono and clamp to 16 bits. */
				int amp = ( mixBuf[ mixIdx * 2 ] + mixBuf[ mixIdx * 2 + 1 ] ) >> 1;
				if( amp > 32767 ) {
					amp = 32767;
				}
				if( amp < -32768 ) {
					amp = -32768;
				}
				outBuf[ outIdx++ ] = ( short ) amp;
			}
		}
		if( wavFile.exists() ) {
			throw new IllegalArgumentException( "Output file already exists!" );
		}
		java.io.OutputStream outputStream = new java.io.FileOutputStream( wavFile );
		try {
			new AudioData( outBuf, samplingRate ).writeWav( outputStream, false );
		} finally {
			outputStream.close();
		}
	}

	/* Sanitise a name for quoting in MT output: double quotes become
	   apostrophes, control and non-printable characters become spaces, and
	   trailing whitespace is trimmed. */
	private static String nameString( String str ) {
		int length = 0;
		char[] out = str.toCharArray();
		for( int idx = 0; idx < out.length; idx++ ) {
			char chr = out[ idx ];
			if( chr == '"' ) {
				chr = '\'';
			} else if( chr < 32 || ( chr > 126 && chr < 160 ) ) {
				chr = 32;
			}
			/* Remember the end of the last printable character. */
			if( chr > 32 ) {
				length = idx + 1;
			}
			out[ idx ] = chr;
		}
		return new String( out, 0, length );
	}
}
|
Java | bsd-3-clause | 02572dde50bc4c0b06971a882bd6281204aefcbe | 0 | jamie-dryad/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,rnathanday/dryad-repo,ojacobson/dryad-repo,mdiggory/dryad-repo,jamie-dryad/dryad-repo,rnathanday/dryad-repo,jamie-dryad/dryad-repo,mdiggory/dryad-repo,mdiggory/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,jamie-dryad/dryad-repo,jimallman/dryad-repo,jimallman/dryad-repo,ojacobson/dryad-repo,rnathanday/dryad-repo,mdiggory/dryad-repo,ojacobson/dryad-repo,ojacobson/dryad-repo,jimallman/dryad-repo,jamie-dryad/dryad-repo,mdiggory/dryad-repo,rnathanday/dryad-repo,jimallman/dryad-repo,rnathanday/dryad-repo,rnathanday/dryad-repo | /*
* DCDate.java
*
* Version: $Revision$
*
* Date: $Date$
*
* Copyright (c) 2001, Hewlett-Packard Company and Massachusetts
* Institute of Technology. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of the Hewlett-Packard Company nor the name of the
* Massachusetts Institute of Technology nor the names of their
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*/
package org.dspace.content;
import org.apache.log4j.Category;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
// FIXME: No tests
// FIXME: Not very robust - assumes dates will always be valid
/**
* Dublin Core date utility class
* <P>
* Dates in the DSpace database are held in the ISO 8601 format. They are
* always stored in UTC, converting to and from the current time zone.
* <P>
* <code>YYYY-MM-DDThh:mm:ss</code>
* <P>
* There are four levels of granularity, depending on how much date information
* is available.
* <P>
* Examples: <code>1994-05-03T15:30:24</code>, <code>1995-10-04</code>,
* <code>2001-10</code>, <code>1975</code>
*
* @author Robert Tansley
* @version $Revision$
*/
public class DCDate
{
    /** Log4j logger for this class */
    private static Category cat = Category.getInstance(DCDate.class);

    /** The year, or -1 if unset */
    private int year;

    /** The month (1 to 12), or -1 if unset */
    private int month;

    /** The day of the month, or -1 if unset */
    private int day;

    /** Hours (0 to 23), or -1 if unset */
    private int hours;

    /** Minutes (0 to 59), or -1 if unset */
    private int minutes;

    /** Seconds (0 to 59), or -1 if unset */
    private int seconds;

    /**
     * Calendar object for timezone conversion.  Lazily initialised by
     * getDateLocal(), and only used if the date has a time component.
     * Access is not synchronised, so instances are not thread-safe.
     */
    private GregorianCalendar localGC;

    /** English month names, indexed by (month number - 1) */
    private final static String[] MONTHNAMES =
    {
        "January",
        "February",
        "March",
        "April",
        "May",
        "June",
        "July",
        "August",
        "September",
        "October",
        "November",
        "December"
    };

    /**
     * Construct a clean date with every field unset.
     */
    public DCDate()
    {
        // Set all fields to unknown
        year = month = day = hours = minutes = seconds = -1;
        localGC = null;
    }

    /**
     * Construct a date from a Dublin Core value.
     *
     * @param fromDC the date string, in ISO 8601 (no timezone, always
     *               UTC/GMT).  A <code>null</code> or empty string
     *               yields a date with every field unset.
     */
    public DCDate(String fromDC)
    {
        // Set all fields to unknown
        year = month = day = hours = minutes = seconds = -1;
        localGC = null;

        // An empty date is OK
        if (fromDC == null || fromDC.equals(""))
        {
            return;
        }

        try
        {
            // The granularity is implied by the length of the string;
            // each case deliberately falls through to pick up the
            // coarser-grained fields as well.
            switch (fromDC.length())
            {
            case 19:
                // Full date and time: YYYY-MM-DDThh:mm:ss
                hours = Integer.parseInt(fromDC.substring(11, 13));
                minutes = Integer.parseInt(fromDC.substring(14, 16));
                seconds = Integer.parseInt(fromDC.substring(17, 19));
                // Fall through
            case 10:
                // Just full date: YYYY-MM-DD
                day = Integer.parseInt(fromDC.substring(8, 10));
                // Fall through
            case 7:
                // Just year and month: YYYY-MM
                month = Integer.parseInt(fromDC.substring(5, 7));
                // Fall through
            case 4:
                // Just the year: YYYY
                year = Integer.parseInt(fromDC.substring(0, 4));
            }
        }
        catch (NumberFormatException e)
        {
            // Mangled date - log it and leave every field unset
            cat.warn("Mangled date: " + fromDC + " Exception: " + e);
            year = month = day = hours = minutes = seconds = -1;
        }
    }

    /**
     * Construct a date object from a Java <code>Date</code> object,
     * interpreted in the current default time zone.
     *
     * @param date the Java <code>Date</code> object.
     */
    public DCDate(Date date)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);

        // Set all fields.  Note that java.util.Calendar months run
        // from 0 to 11, while this class uses 1 to 12.
        setDateLocal(calendar.get(Calendar.YEAR),
                     calendar.get(Calendar.MONTH) + 1,
                     calendar.get(Calendar.DAY_OF_MONTH),
                     calendar.get(Calendar.HOUR_OF_DAY),
                     calendar.get(Calendar.MINUTE),
                     calendar.get(Calendar.SECOND));
    }

    /**
     * Get a date representing the current instant in time.
     *
     * @return a DCDate object representing the current instant.
     */
    public static DCDate getCurrent()
    {
        return new DCDate(new Date());
    }

    /**
     * Get the date as a string to put back in the Dublin Core.  The
     * string is in ISO 8601 format, in UTC, with as much granularity
     * as is set: <code>YYYY</code>, <code>YYYY-MM</code>,
     * <code>YYYY-MM-DD</code> or <code>YYYY-MM-DDThh:mm:ss</code>.
     *
     * @return the date as a string; an empty string if no fields are set.
     */
    public String toString()
    {
        StringBuffer sb = new StringBuffer();

        if (year > 0)
        {
            sb.append(year);
        }

        if (month > 0)
        {
            sb.append('-').append(fleshOut(month));
        }

        if (day > 0)
        {
            sb.append('-').append(fleshOut(day));
        }

        // FIX: 0 is a valid hour (midnight) and only -1 means "no time
        // component"; the previous "hours > 0" test silently dropped the
        // entire time portion of any date whose UTC hour was 00.
        if (hours >= 0)
        {
            sb.append('T')
              .append(fleshOut(hours))
              .append(':')
              .append(fleshOut(minutes))
              .append(':')
              .append(fleshOut(seconds));
        }

        return sb.toString();
    }

    /**
     * Set the date.  The date passed in is assumed to be in the current
     * time zone, and is adjusted to UTC for internal storage.  Unknown
     * values should be given as -1; once a field is unset, all
     * finer-grained fields are ignored (e.g. a day without a month is
     * discarded).
     *
     * @param yyyy the year
     * @param mm   the month (1 to 12)
     * @param dd   the day of the month
     * @param hh   the hours (0 to 23)
     * @param mn   the minutes (0 to 59)
     * @param ss   the seconds (0 to 59)
     */
    public void setDateLocal(int yyyy,
                             int mm,
                             int dd,
                             int hh,
                             int mn,
                             int ss)
    {
        year = month = day = hours = minutes = seconds = -1;

        if (yyyy > 0)
        {
            year = yyyy;
        }
        else
        {
            return;
        }

        if (mm > 0)
        {
            month = mm;
        }
        else
        {
            return;
        }

        if (dd > 0)
        {
            day = dd;
        }
        else
        {
            return;
        }

        if (hh == -1)
        {
            return;
        }

        // We have a time, so we need a timezone adjustment: build a
        // calendar in the local zone...
        localGC = new GregorianCalendar(year, month - 1, day, hh, mn, ss);

        // ...and read the fields back out in UTC
        GregorianCalendar utcGC =
            new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        utcGC.setTime(localGC.getTime());

        year = utcGC.get(Calendar.YEAR);
        // Back to 1-to-12 month notation
        month = utcGC.get(Calendar.MONTH) + 1;
        day = utcGC.get(Calendar.DAY_OF_MONTH);
        hours = utcGC.get(Calendar.HOUR_OF_DAY);
        minutes = utcGC.get(Calendar.MINUTE);
        seconds = utcGC.get(Calendar.SECOND);
    }

    /**
     * Get the date as an array of ints, adjusted for the current timezone.
     *
     * @return the date as an array:
     *         (year, month, day, hour, minute, seconds) - unset
     *         fields are given a value of -1.
     */
    private int[] getDateLocal()
    {
        // Simple dates with no time component need no timezone adjustment
        if (hours == -1)
        {
            return new int[]{ year, month, day, -1, -1, -1 };
        }

        // We have a full time; lazily build and cache the local-zone
        // calendar from the stored UTC fields
        if (localGC == null)
        {
            GregorianCalendar utcGC = new GregorianCalendar(
                TimeZone.getTimeZone("UTC"));
            utcGC.set(year, month - 1, day, hours, minutes, seconds);
            localGC = new GregorianCalendar();
            localGC.setTime(utcGC.getTime());
        }

        return new int[]{
            localGC.get(Calendar.YEAR),
            localGC.get(Calendar.MONTH) + 1,
            localGC.get(Calendar.DAY_OF_MONTH),
            localGC.get(Calendar.HOUR_OF_DAY),
            localGC.get(Calendar.MINUTE),
            localGC.get(Calendar.SECOND) };
    }

    /**
     * Get the year, adjusting for current time zone.
     *
     * @return the year
     */
    public int getYear()
    {
        return (getDateLocal())[0];
    }

    /**
     * Get the month, adjusting for current time zone.
     *
     * @return the month
     */
    public int getMonth()
    {
        return (getDateLocal())[1];
    }

    /**
     * Get the day, adjusting for current time zone.
     *
     * @return the day
     */
    public int getDay()
    {
        return (getDateLocal())[2];
    }

    /**
     * Get the hour, adjusting for current time zone.
     *
     * @return the hour
     */
    public int getHour()
    {
        return (getDateLocal())[3];
    }

    /**
     * Get the minute, adjusting for current time zone.
     *
     * @return the minute
     */
    public int getMinute()
    {
        return (getDateLocal())[4];
    }

    /**
     * Get the second, adjusting for current time zone.
     *
     * @return the second
     */
    public int getSecond()
    {
        return (getDateLocal())[5];
    }

    /**
     * Get the date as an array of ints in GMT.
     *
     * @return the date as an array:
     *         (year, month, day, hour, minute, seconds) - unset
     *         fields are given a value of -1.
     */
    private int[] getDateGMT()
    {
        return new int[]{ year, month, day, hours, minutes, seconds };
    }

    /**
     * Get the year in GMT.
     *
     * @return the year
     */
    public int getYearGMT()
    {
        return (getDateGMT())[0];
    }

    /**
     * Get the month in GMT.
     *
     * @return the month
     */
    public int getMonthGMT()
    {
        return (getDateGMT())[1];
    }

    /**
     * Get the day in GMT.
     *
     * @return the day
     */
    public int getDayGMT()
    {
        return (getDateGMT())[2];
    }

    /**
     * Get the hour in GMT.
     *
     * @return the hour
     */
    public int getHourGMT()
    {
        return (getDateGMT())[3];
    }

    /**
     * Get the minute in GMT.
     *
     * @return the minute
     */
    public int getMinuteGMT()
    {
        return (getDateGMT())[4];
    }

    /**
     * Get the second in GMT.
     *
     * @return the second
     */
    public int getSecondGMT()
    {
        return (getDateGMT())[5];
    }

    /**
     * Flesh out a number to two digits.  Static since it uses no
     * instance state.
     *
     * @param n the number
     * @return the number as a two-digit string
     */
    private static String fleshOut(int n)
    {
        return (n < 10) ? "0" + n : String.valueOf(n);
    }

    /**
     * Get a month's name for a month between 1 and 12.  Any invalid
     * month value (e.g. 0 or -1) will return a value of "Unspecified".
     *
     * @param m the month number
     *
     * @return the month name.
     */
    public static String getMonthName(int m)
    {
        if (m > 0 && m < 13)
        {
            return MONTHNAMES[m - 1];
        }
        else
        {
            return "Unspecified";
        }
    }
}
| dspace/src/org/dspace/content/DCDate.java | /*
* DCDate.java
*
* Version: $Revision$
*
* Date: $Date$
*
* Copyright (c) 2001, Hewlett-Packard Company and Massachusetts
* Institute of Technology. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* - Neither the name of the Hewlett-Packard Company nor the name of the
* Massachusetts Institute of Technology nor the names of their
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*/
package org.dspace.content;
import org.apache.log4j.Category;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
// FIXME: No tests
// FIXME: Not very robust - assumes dates will always be valid
/**
* Dublin Core date utility class
* <P>
* Dates in the DSpace database are held in the ISO 8601 format. They are
* always stored in UTC, converting to and from the current time zone.
* <P>
* <code>YYYY-MM-DDThh:mm:ss</code>
* <P>
* There are four levels of granularity, depending on how much date information
* is available.
* <P>
* Examples: <code>1994-05-03T15:30:24</code>, <code>1995-10-04</code>,
* <code>2001-10</code>, <code>1975</code>
*
* @author Robert Tansley
* @version $Revision$
*/
public class DCDate
{
    /**
     * Log4j logger for this class.  FIX: the category was previously the
     * bogus string "org.dspace.util.DSpaceDate", which names a different
     * (nonexistent) class; use this class itself instead.
     */
    private static Category cat = Category.getInstance(DCDate.class);

    /** The year, or -1 if unset */
    private int year;

    /** The month (1 to 12), or -1 if unset */
    private int month;

    /** The day of the month, or -1 if unset */
    private int day;

    /** Hours (0 to 23), or -1 if unset */
    private int hours;

    /** Minutes (0 to 59), or -1 if unset */
    private int minutes;

    /** Seconds (0 to 59), or -1 if unset */
    private int seconds;

    /**
     * Calendar object for timezone conversion.  Lazily initialised by
     * getDateLocal(), and only used if the date has a time component.
     * Access is not synchronised, so instances are not thread-safe.
     */
    private GregorianCalendar localGC;

    /** English month names, indexed by (month number - 1) */
    private final static String[] MONTHNAMES =
    {
        "January",
        "February",
        "March",
        "April",
        "May",
        "June",
        "July",
        "August",
        "September",
        "October",
        "November",
        "December"
    };

    /**
     * Construct a clean date with every field unset.
     */
    public DCDate()
    {
        // Set all fields to unknown
        year = month = day = hours = minutes = seconds = -1;
        localGC = null;
    }

    /**
     * Construct a date from a Dublin Core value.
     *
     * @param fromDC the date string, in ISO 8601 (no timezone, always
     *               UTC/GMT).  A <code>null</code> or empty string
     *               yields a date with every field unset.
     */
    public DCDate(String fromDC)
    {
        // Set all fields to unknown
        year = month = day = hours = minutes = seconds = -1;
        localGC = null;

        // An empty date is OK
        if (fromDC == null || fromDC.equals(""))
        {
            return;
        }

        try
        {
            // The granularity is implied by the length of the string;
            // each case deliberately falls through to pick up the
            // coarser-grained fields as well.
            switch (fromDC.length())
            {
            case 19:
                // Full date and time: YYYY-MM-DDThh:mm:ss
                hours = Integer.parseInt(fromDC.substring(11, 13));
                minutes = Integer.parseInt(fromDC.substring(14, 16));
                seconds = Integer.parseInt(fromDC.substring(17, 19));
                // Fall through
            case 10:
                // Just full date: YYYY-MM-DD
                day = Integer.parseInt(fromDC.substring(8, 10));
                // Fall through
            case 7:
                // Just year and month: YYYY-MM
                month = Integer.parseInt(fromDC.substring(5, 7));
                // Fall through
            case 4:
                // Just the year: YYYY
                year = Integer.parseInt(fromDC.substring(0, 4));
            }
        }
        catch (NumberFormatException e)
        {
            // Mangled date - log it and leave every field unset
            cat.warn("Mangled date: " + fromDC + " Exception: " + e);
            year = month = day = hours = minutes = seconds = -1;
        }
    }

    /**
     * Construct a date object from a Java <code>Date</code> object,
     * interpreted in the current default time zone.
     *
     * @param date the Java <code>Date</code> object.
     */
    public DCDate(Date date)
    {
        Calendar calendar = Calendar.getInstance();
        calendar.setTime(date);

        // Set all fields.  Note that java.util.Calendar months run
        // from 0 to 11, while this class uses 1 to 12.
        setDateLocal(calendar.get(Calendar.YEAR),
                     calendar.get(Calendar.MONTH) + 1,
                     calendar.get(Calendar.DAY_OF_MONTH),
                     calendar.get(Calendar.HOUR_OF_DAY),
                     calendar.get(Calendar.MINUTE),
                     calendar.get(Calendar.SECOND));
    }

    /**
     * Get a date representing the current instant in time.
     *
     * @return a DCDate object representing the current instant.
     */
    public static DCDate getCurrent()
    {
        return new DCDate(new Date());
    }

    /**
     * Get the date as a string to put back in the Dublin Core.  The
     * string is in ISO 8601 format, in UTC, with as much granularity
     * as is set: <code>YYYY</code>, <code>YYYY-MM</code>,
     * <code>YYYY-MM-DD</code> or <code>YYYY-MM-DDThh:mm:ss</code>.
     *
     * @return the date as a string; an empty string if no fields are set.
     */
    public String toString()
    {
        StringBuffer sb = new StringBuffer();

        if (year > 0)
        {
            sb.append(year);
        }

        if (month > 0)
        {
            sb.append('-').append(fleshOut(month));
        }

        if (day > 0)
        {
            sb.append('-').append(fleshOut(day));
        }

        // FIX: 0 is a valid hour (midnight) and only -1 means "no time
        // component"; the previous "hours > 0" test silently dropped the
        // entire time portion of any date whose UTC hour was 00.
        if (hours >= 0)
        {
            sb.append('T')
              .append(fleshOut(hours))
              .append(':')
              .append(fleshOut(minutes))
              .append(':')
              .append(fleshOut(seconds));
        }

        return sb.toString();
    }

    /**
     * Set the date.  The date passed in is assumed to be in the current
     * time zone, and is adjusted to UTC for internal storage.  Unknown
     * values should be given as -1; once a field is unset, all
     * finer-grained fields are ignored (e.g. a day without a month is
     * discarded).
     *
     * @param yyyy the year
     * @param mm   the month (1 to 12)
     * @param dd   the day of the month
     * @param hh   the hours (0 to 23)
     * @param mn   the minutes (0 to 59)
     * @param ss   the seconds (0 to 59)
     */
    public void setDateLocal(int yyyy,
                             int mm,
                             int dd,
                             int hh,
                             int mn,
                             int ss)
    {
        year = month = day = hours = minutes = seconds = -1;

        if (yyyy > 0)
        {
            year = yyyy;
        }
        else
        {
            return;
        }

        if (mm > 0)
        {
            month = mm;
        }
        else
        {
            return;
        }

        if (dd > 0)
        {
            day = dd;
        }
        else
        {
            return;
        }

        if (hh == -1)
        {
            return;
        }

        // We have a time, so we need a timezone adjustment: build a
        // calendar in the local zone...
        localGC = new GregorianCalendar(year, month - 1, day, hh, mn, ss);

        // ...and read the fields back out in UTC
        GregorianCalendar utcGC =
            new GregorianCalendar(TimeZone.getTimeZone("UTC"));
        utcGC.setTime(localGC.getTime());

        year = utcGC.get(Calendar.YEAR);
        // Back to 1-to-12 month notation
        month = utcGC.get(Calendar.MONTH) + 1;
        day = utcGC.get(Calendar.DAY_OF_MONTH);
        hours = utcGC.get(Calendar.HOUR_OF_DAY);
        minutes = utcGC.get(Calendar.MINUTE);
        seconds = utcGC.get(Calendar.SECOND);
    }

    /**
     * Get the date as an array of ints, adjusted for the current timezone.
     *
     * @return the date as an array:
     *         (year, month, day, hour, minute, seconds) - unset
     *         fields are given a value of -1.
     */
    private int[] getDateLocal()
    {
        // Simple dates with no time component need no timezone adjustment
        if (hours == -1)
        {
            return new int[]{ year, month, day, -1, -1, -1 };
        }

        // We have a full time; lazily build and cache the local-zone
        // calendar from the stored UTC fields
        if (localGC == null)
        {
            GregorianCalendar utcGC = new GregorianCalendar(
                TimeZone.getTimeZone("UTC"));
            utcGC.set(year, month - 1, day, hours, minutes, seconds);
            localGC = new GregorianCalendar();
            localGC.setTime(utcGC.getTime());
        }

        return new int[]{
            localGC.get(Calendar.YEAR),
            localGC.get(Calendar.MONTH) + 1,
            localGC.get(Calendar.DAY_OF_MONTH),
            localGC.get(Calendar.HOUR_OF_DAY),
            localGC.get(Calendar.MINUTE),
            localGC.get(Calendar.SECOND) };
    }

    /**
     * Get the year, adjusting for current time zone.
     *
     * @return the year
     */
    public int getYear()
    {
        return (getDateLocal())[0];
    }

    /**
     * Get the month, adjusting for current time zone.
     *
     * @return the month
     */
    public int getMonth()
    {
        return (getDateLocal())[1];
    }

    /**
     * Get the day, adjusting for current time zone.
     *
     * @return the day
     */
    public int getDay()
    {
        return (getDateLocal())[2];
    }

    /**
     * Get the hour, adjusting for current time zone.
     *
     * @return the hour
     */
    public int getHour()
    {
        return (getDateLocal())[3];
    }

    /**
     * Get the minute, adjusting for current time zone.
     *
     * @return the minute
     */
    public int getMinute()
    {
        return (getDateLocal())[4];
    }

    /**
     * Get the second, adjusting for current time zone.
     *
     * @return the second
     */
    public int getSecond()
    {
        return (getDateLocal())[5];
    }

    /**
     * Get the date as an array of ints in GMT.
     *
     * @return the date as an array:
     *         (year, month, day, hour, minute, seconds) - unset
     *         fields are given a value of -1.
     */
    private int[] getDateGMT()
    {
        return new int[]{ year, month, day, hours, minutes, seconds };
    }

    /**
     * Get the year in GMT.
     *
     * @return the year
     */
    public int getYearGMT()
    {
        return (getDateGMT())[0];
    }

    /**
     * Get the month in GMT.
     *
     * @return the month
     */
    public int getMonthGMT()
    {
        return (getDateGMT())[1];
    }

    /**
     * Get the day in GMT.
     *
     * @return the day
     */
    public int getDayGMT()
    {
        return (getDateGMT())[2];
    }

    /**
     * Get the hour in GMT.
     *
     * @return the hour
     */
    public int getHourGMT()
    {
        return (getDateGMT())[3];
    }

    /**
     * Get the minute in GMT.
     *
     * @return the minute
     */
    public int getMinuteGMT()
    {
        return (getDateGMT())[4];
    }

    /**
     * Get the second in GMT.
     *
     * @return the second
     */
    public int getSecondGMT()
    {
        return (getDateGMT())[5];
    }

    /**
     * Flesh out a number to two digits.  Static since it uses no
     * instance state.
     *
     * @param n the number
     * @return the number as a two-digit string
     */
    private static String fleshOut(int n)
    {
        return (n < 10) ? "0" + n : String.valueOf(n);
    }

    /**
     * Get a month's name for a month between 1 and 12.  Any invalid
     * month value (e.g. 0 or -1) will return a value of "Unspecified".
     *
     * @param m the month number
     *
     * @return the month name.
     */
    public static String getMonthName(int m)
    {
        if (m > 0 && m < 13)
        {
            return MONTHNAMES[m - 1];
        }
        else
        {
            return "Unspecified";
        }
    }
}
| Fix bogus log4j category
git-svn-id: 39c64a9546defcc59b5f71fe8fe20b2d01c24c1f@218 9c30dcfa-912a-0410-8fc2-9e0234be79fd
| dspace/src/org/dspace/content/DCDate.java | Fix bogus log4j category |
|
Java | apache-2.0 | 2829166b2005c371c5bf9da1d4e94d09380d1001 | 0 | capesonlee/tangtang-spring-cloud,capesonlee/tangtang-spring-cloud | order-service/src/main/java/com/lijuyong/startup/controller/OrderStat.java | package com.lijuyong.startup.controller;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* Created by john on 2017/2/24.
*/
@RestController
@RequestMapping("/stat")
public class OrderStat {

    /** Fixed order total returned by {@link #count()}. */
    private static final int ORDER_COUNT = 2046;

    /** Value bound from the externalized "from" configuration property. */
    @Value("${from}")
    private String from;

    /**
     * Report the (currently hard-coded) order count.
     *
     * @return the order count
     */
    @RequestMapping("/count")
    public int count() {
        return ORDER_COUNT;
    }

    /**
     * Echo the injected "from" configuration value, exposing which
     * configuration source this instance was loaded from.
     *
     * @return the value of the "from" property
     */
    @RequestMapping("/config")
    public String config() {
        return from;
    }
}
| 删除误提交的文件
| order-service/src/main/java/com/lijuyong/startup/controller/OrderStat.java | 删除误提交的文件 |
||
Java | mit | 8b39ed886d186fe2a16a769d83514f82a470a484 | 0 | anudeepsharma/azure-sdk-for-java,Azure/azure-sdk-for-java,navalev/azure-sdk-for-java,herveyw/azure-sdk-for-java,pomortaz/azure-sdk-for-java,hovsepm/azure-sdk-for-java,herveyw/azure-sdk-for-java,pomortaz/azure-sdk-for-java,martinsawicki/azure-sdk-for-java,selvasingh/azure-sdk-for-java,selvasingh/azure-sdk-for-java,hovsepm/azure-sdk-for-java,herveyw/azure-sdk-for-java,jianghaolu/azure-sdk-for-java,anudeepsharma/azure-sdk-for-java,Azure/azure-sdk-for-java,hovsepm/azure-sdk-for-java,martinsawicki/azure-sdk-for-java,pomortaz/azure-sdk-for-java,Azure/azure-sdk-for-java,herveyw/azure-sdk-for-java,selvasingh/azure-sdk-for-java,jalves94/azure-sdk-for-java,jalves94/azure-sdk-for-java,Azure/azure-sdk-for-java,jalves94/azure-sdk-for-java,navalev/azure-sdk-for-java,anudeepsharma/azure-sdk-for-java,anudeepsharma/azure-sdk-for-java,jalves94/azure-sdk-for-java,anudeepsharma/azure-sdk-for-java,hovsepm/azure-sdk-for-java,ljhljh235/azure-sdk-for-java,pomortaz/azure-sdk-for-java,ljhljh235/azure-sdk-for-java,Azure/azure-sdk-for-java,martinsawicki/azure-sdk-for-java,hovsepm/azure-sdk-for-java,navalev/azure-sdk-for-java,navalev/azure-sdk-for-java,navalev/azure-sdk-for-java,jianghaolu/azure-sdk-for-java,jianghaolu/azure-sdk-for-java | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*/
package com.microsoft.azure.management.network;
import com.microsoft.azure.management.apigeneration.LangDefinition;
import com.microsoft.azure.management.apigeneration.LangDefinition.MethodConversion;
import com.microsoft.azure.management.network.implementation.InboundNatPoolInner;
import com.microsoft.azure.management.network.model.HasBackendPort;
import com.microsoft.azure.management.network.model.HasFrontend;
import com.microsoft.azure.management.network.model.HasProtocol;
import com.microsoft.azure.management.resources.fluentcore.arm.models.ChildResource;
import com.microsoft.azure.management.resources.fluentcore.model.Attachable;
import com.microsoft.azure.management.resources.fluentcore.model.Settable;
import com.microsoft.azure.management.resources.fluentcore.model.Wrapper;
/**
 * An immutable client-side representation of an inbound NAT pool.
 */
@LangDefinition()
public interface InboundNatPool extends
    HasFrontend,
    HasBackendPort,
    HasProtocol<TransportProtocol>,
    Wrapper<InboundNatPoolInner>,
    ChildResource<LoadBalancer> {

    /**
     * @return the starting frontend port number of the pool's range
     */
    int frontendPortRangeStart();

    /**
     * @return the ending frontend port number of the pool's range
     */
    int frontendPortRangeEnd();

    /**
     * Grouping of inbound NAT pool definition stages.
     */
    @LangDefinition(ContainerName = "Definition", ContainerFileName = "IDefinition", IsContainerOnly = true, MethodConversionType = MethodConversion.OnlyMethod)
    interface DefinitionStages {
        /**
         * The first stage of the inbound NAT pool definition.
         * @param <ParentT> the return type of the final {@link WithAttach#attach()}
         */
        interface Blank<ParentT> extends WithProtocol<ParentT> {
        }

        /**
         * The final stage of the inbound NAT pool definition.
         * <p>
         * At this stage, any remaining optional settings can be specified, or the inbound NAT pool definition
         * can be attached to the parent load balancer definition using {@link WithAttach#attach()}.
         * @param <ParentT> the return type of {@link WithAttach#attach()}
         */
        interface WithAttach<ParentT> extends
            Attachable.InDefinition<ParentT> {
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the transport protocol for the pool to apply to.
         * @param <ParentT> the parent load balancer type
         */
        interface WithProtocol<ParentT> extends
            HasProtocol.DefinitionStages.WithProtocol<WithFrontend<ParentT>, TransportProtocol> {
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the frontend for the inbound NAT rules in the pool to apply to.
         * @param <ParentT> the parent load balancer type
         */
        interface WithFrontend<ParentT> extends
            HasFrontend.DefinitionStages.WithFrontend<WithFrontendPortRange<ParentT>> {
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the frontend port range.
         * @param <ParentT> the parent load balancer type
         */
        interface WithFrontendPortRange<ParentT> {
            /**
             * Specifies the frontend port range.
             * @param from the starting port number, between 1 and 65534
             * @param to the ending port number, greater than the starting port number and no more than 65534
             * @return the next stage of the definition
             */
            WithBackendPort<ParentT> withFrontendPortRange(int from, int to);
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the backend port.
         * @param <ParentT> the parent load balancer type
         */
        interface WithBackendPort<ParentT> extends
            HasBackendPort.DefinitionStages.WithBackendPort<WithAttach<ParentT>> {
        }
    }

    /** The entirety of an inbound NAT pool definition.
     * @param <ParentT> the return type of the final {@link DefinitionStages.WithAttach#attach()}
     */
    @LangDefinition()
    interface Definition<ParentT> extends
        DefinitionStages.Blank<ParentT>,
        DefinitionStages.WithAttach<ParentT>,
        DefinitionStages.WithProtocol<ParentT>,
        DefinitionStages.WithFrontend<ParentT>,
        DefinitionStages.WithFrontendPortRange<ParentT>,
        DefinitionStages.WithBackendPort<ParentT> {
    }

    /**
     * Grouping of inbound NAT pool update stages.
     */
    @LangDefinition(ContainerName = "Update", ContainerFileName = "IUpdate", IsContainerOnly = true)
    interface UpdateStages {
        /**
         * The stage of an inbound NAT pool update allowing to specify the transport protocol for the pool to apply to.
         */
        interface WithProtocol extends
            HasProtocol.UpdateStages.WithProtocol<Update, TransportProtocol> {
        }

        /**
         * The stage of an inbound NAT pool update allowing to specify the frontend for the inbound NAT rules in the pool to apply to.
         */
        interface WithFrontend extends
            HasFrontend.UpdateStages.WithFrontend<Update> {
        }

        /**
         * The stage of an inbound NAT pool update allowing to specify the frontend port range.
         */
        interface WithFrontendPortRange {
            /**
             * Specifies the frontend port range.
             * @param from the starting port number, between 1 and 65534
             * @param to the ending port number, greater than the starting port number and no more than 65534
             * @return the next stage of the update
             */
            Update withFrontendPortRange(int from, int to);
        }

        /**
         * The stage of an inbound NAT pool update allowing to specify the backend port.
         */
        interface WithBackendPort extends
            HasBackendPort.UpdateStages.WithBackendPort<Update> {
        }
    }

    /**
     * The entirety of an inbound NAT pool update as part of a load balancer update.
     */
    @LangDefinition(MethodConversionType = MethodConversion.OnlyMethod)
    interface Update extends
        Settable<LoadBalancer.Update>,
        UpdateStages.WithProtocol,
        UpdateStages.WithFrontend,
        UpdateStages.WithBackendPort,
        UpdateStages.WithFrontendPortRange {
    }

    /**
     * Grouping of inbound NAT pool definition stages applicable as part of a load balancer update.
     */
    @LangDefinition(ContainerName = "UpdateDefinition", ContainerFileName = "IUpdateDefinition", IsContainerOnly = true, MethodConversionType = MethodConversion.OnlyMethod)
    interface UpdateDefinitionStages {
        /**
         * The first stage of the inbound NAT pool definition.
         * @param <ParentT> the return type of the final {@link WithAttach#attach()}
         */
        interface Blank<ParentT> extends WithProtocol<ParentT> {
        }

        /**
         * The final stage of the inbound NAT pool definition.
         * <p>
         * At this stage, any remaining optional settings can be specified, or the inbound NAT pool
         * definition can be attached to the parent load balancer definition using {@link WithAttach#attach()}.
         * @param <ParentT> the return type of {@link WithAttach#attach()}
         */
        interface WithAttach<ParentT> extends
            Attachable.InUpdate<ParentT> {
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the transport protocol for the pool to apply to.
         * @param <ParentT> the parent load balancer type
         */
        interface WithProtocol<ParentT> extends
            HasProtocol.UpdateDefinitionStages.WithProtocol<WithAttach<ParentT>, TransportProtocol> {
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the frontend for the inbound NAT rules in the pool to apply to.
         * @param <ParentT> the parent load balancer type
         */
        interface WithFrontend<ParentT> extends
            HasFrontend.UpdateDefinitionStages.WithFrontend<WithAttach<ParentT>> {
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the frontend port range.
         * @param <ParentT> the parent load balancer type
         */
        interface WithFrontendPortRange<ParentT> {
            /**
             * Specifies the frontend port range.
             * @param from the starting port number, between 1 and 65534
             * @param to the ending port number, greater than the starting port number and no more than 65534
             * @return the next stage of the definition
             */
            WithAttach<ParentT> withFrontendPortRange(int from, int to);
        }

        /**
         * The stage of an inbound NAT pool definition allowing to specify the backend port.
         * @param <ParentT> the parent load balancer type
         */
        interface WithBackendPort<ParentT> extends
            HasBackendPort.UpdateDefinitionStages.WithBackendPort<WithAttach<ParentT>> {
        }
    }

    /** The entirety of an inbound NAT pool definition as part of a load balancer update.
     * @param <ParentT> the return type of the final {@link UpdateDefinitionStages.WithAttach#attach()}
     */
    @LangDefinition()
    interface UpdateDefinition<ParentT> extends
        UpdateDefinitionStages.Blank<ParentT>,
        UpdateDefinitionStages.WithAttach<ParentT>,
        UpdateDefinitionStages.WithProtocol<ParentT>,
        UpdateDefinitionStages.WithFrontend<ParentT>,
        UpdateDefinitionStages.WithFrontendPortRange<ParentT>,
        UpdateDefinitionStages.WithBackendPort<ParentT> {
    }
}
| azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/InboundNatPool.java | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*/
package com.microsoft.azure.management.network;
import com.microsoft.azure.management.apigeneration.LangDefinition;
import com.microsoft.azure.management.apigeneration.LangDefinition.MethodConversion;
import com.microsoft.azure.management.network.implementation.InboundNatPoolInner;
import com.microsoft.azure.management.network.model.HasBackendPort;
import com.microsoft.azure.management.network.model.HasFrontend;
import com.microsoft.azure.management.network.model.HasProtocol;
import com.microsoft.azure.management.resources.fluentcore.arm.models.ChildResource;
import com.microsoft.azure.management.resources.fluentcore.model.Attachable;
import com.microsoft.azure.management.resources.fluentcore.model.Settable;
import com.microsoft.azure.management.resources.fluentcore.model.Wrapper;
/**
* An immutable client-side representation of an inbound NAT rule.
*/
@LangDefinition()
public interface InboundNatPool extends
HasFrontend,
HasBackendPort,
HasProtocol<TransportProtocol>,
Wrapper<InboundNatPoolInner>,
ChildResource<LoadBalancer> {
/**
* @return the starting frontend port number
*/
int frontendPortRangeStart();
/**
* @return the ending frontend port number
*/
int frontendPortRangeEnd();
/**
* Grouping of inbound NAT pool definition stages.
*/
@LangDefinition(ContainerName = "Definition", ContainerFileName = "IDefinition", IsContainerOnly = true, MethodConversionType = MethodConversion.OnlyMethod)
interface DefinitionStages {
/**
* The first stage of the inbound NAT pool definition.
* @param <ParentT> the return type of the final {@link WithAttach#attach()}
*/
interface Blank<ParentT> extends WithProtocol<ParentT> {
}
/**
* The final stage of the inbound NAT pool definition.
* <p>
* At this stage, any remaining optional settings can be specified, or the inbound NAT pool definition
* can be attached to the parent load balancer definition using {@link WithAttach#attach()}.
* @param <ParentT> the return type of {@link WithAttach#attach()}
*/
interface WithAttach<ParentT> extends
Attachable.InDefinition<ParentT> {
}
/**
* The stage of an inbound NAT pool definition allowing to specify the transport protocol for the pool to apply to.
* @param <ParentT> the parent load balancer type
*/
interface WithProtocol<ParentT> extends
HasProtocol.DefinitionStages.WithProtocol<WithFrontend<ParentT>, TransportProtocol> {
}
/**
* The stage of an inbound NAT pool definition allowing to specify the frontend for the inbound NAT rules in the pool to apply to.
* @param <ParentT> the parent load balancer type
*/
interface WithFrontend<ParentT> extends
HasFrontend.DefinitionStages.WithFrontend<WithFrontendPortRange<ParentT>> {
}
/**
* The stage of an inbound NAT pool definition allowing to specify the frontend port range.
* @param <ParentT> the parent load balancer type
*/
interface WithFrontendPortRange<ParentT> {
/**
* Specifies the frontend port range.
* @param from the starting port number, between 1 and 65534
* @param to the ending port number, greater than the starting port number and no more than 65534
* @return the next stage of the definition
*/
WithBackendPort<ParentT> withFrontendPortRange(int from, int to);
}
/**
* The stage of an inbound NAT pool definition allowing to specify the backend port.
* @param <ParentT> the parent load balancer type
*/
interface WithBackendPort<ParentT> extends
HasBackendPort.DefinitionStages.WithBackendPort<WithAttach<ParentT>> {
}
}
/** The entirety of an inbound NAT pool definition.
* @param <ParentT> the return type of the final {@link DefinitionStages.WithAttach#attach()}
*/
@LangDefinition(ContainerName = "Definition", ContainerFileName = "IDefinition")
interface Definition<ParentT> extends
DefinitionStages.Blank<ParentT>,
DefinitionStages.WithAttach<ParentT>,
DefinitionStages.WithProtocol<ParentT>,
DefinitionStages.WithFrontend<ParentT>,
DefinitionStages.WithFrontendPortRange<ParentT>,
DefinitionStages.WithBackendPort<ParentT> {
}
/**
 * Grouping of inbound NAT pool update stages.
 */
@LangDefinition(ContainerName = "Update", ContainerFileName = "IUpdate", IsContainerOnly = true)
interface UpdateStages {
    /**
     * The stage of an inbound NAT pool update allowing to specify the transport protocol for the pool to apply to.
     */
    interface WithProtocol extends
        HasProtocol.UpdateStages.WithProtocol<Update, TransportProtocol> {
    }
    /**
     * The stage of an inbound NAT pool update allowing to specify the frontend for the inbound NAT rules in the pool to apply to.
     */
    interface WithFrontend extends
        HasFrontend.UpdateStages.WithFrontend<Update> {
    }
    /**
     * The stage of an inbound NAT pool update allowing to specify the frontend port range.
     */
    interface WithFrontendPortRange {
        /**
         * Specifies the frontend port range.
         * @param from the starting port number, between 1 and 65534
         * @param to the ending port number, greater than the starting port number and no more than 65534
         * @return the next stage of the update
         */
        Update withFrontendPortRange(int from, int to);
    }
    /**
     * The stage of an inbound NAT pool update allowing to specify the backend port.
     */
    interface WithBackendPort extends
        HasBackendPort.UpdateStages.WithBackendPort<Update> {
    }
}
/**
 * The entirety of an inbound NAT pool update as part of a load balancer update.
 * <p>
 * Settable back into the parent {@link LoadBalancer.Update} via {@link Settable}.
 */
@LangDefinition(ContainerName = "Update", ContainerFileName = "IUpdate", MethodConversionType = MethodConversion.OnlyMethod)
interface Update extends
    Settable<LoadBalancer.Update>,
    UpdateStages.WithProtocol,
    UpdateStages.WithFrontend,
    UpdateStages.WithBackendPort,
    UpdateStages.WithFrontendPortRange {
}
/**
 * Grouping of inbound NAT pool definition stages applicable as part of a load balancer update.
 */
@LangDefinition(ContainerName = "UpdateDefinition", ContainerFileName = "IUpdateDefinition", IsContainerOnly = true, MethodConversionType = MethodConversion.OnlyMethod)
interface UpdateDefinitionStages {
    /**
     * The first stage of the inbound NAT pool definition.
     * @param <ParentT> the return type of the final {@link WithAttach#attach()}
     */
    interface Blank<ParentT> extends WithProtocol<ParentT> {
    }
    /**
     * The final stage of the inbound NAT pool definition.
     * <p>
     * At this stage, any remaining optional settings can be specified, or the inbound NAT pool
     * definition can be attached to the parent load balancer definition using {@link WithAttach#attach()}.
     * @param <ParentT> the return type of {@link WithAttach#attach()}
     */
    interface WithAttach<ParentT> extends
        Attachable.InUpdate<ParentT> {
    }
    /**
     * The stage of an inbound NAT pool definition allowing to specify the transport protocol for the pool to apply to.
     * @param <ParentT> the parent load balancer type
     */
    interface WithProtocol<ParentT> extends
        HasProtocol.UpdateDefinitionStages.WithProtocol<WithAttach<ParentT>, TransportProtocol> {
    }
    /**
     * The stage of an inbound NAT pool definition allowing to specify the frontend for the inbound NAT rules in the pool to apply to.
     * @param <ParentT> the parent load balancer type
     */
    interface WithFrontend<ParentT> extends
        HasFrontend.UpdateDefinitionStages.WithFrontend<WithAttach<ParentT>> {
    }
    /**
     * The stage of an inbound NAT pool definition allowing to specify the frontend port range.
     * @param <ParentT> the parent load balancer type
     */
    interface WithFrontendPortRange<ParentT> {
        /**
         * Specifies the frontend port range.
         * @param from the starting port number, between 1 and 65534
         * @param to the ending port number, greater than the starting port number and no more than 65534
         * @return the next stage of the definition
         */
        WithAttach<ParentT> withFrontendPortRange(int from, int to);
    }
    /**
     * The stage of an inbound NAT pool definition allowing to specify the backend port.
     * @param <ParentT> the parent load balancer type
     */
    interface WithBackendPort<ParentT> extends
        HasBackendPort.UpdateDefinitionStages.WithBackendPort<WithAttach<ParentT>> {
    }
}
/** The entirety of an inbound NAT pool definition as part of a load balancer update.
 * <p>
 * Aggregates every individual stage declared in {@link UpdateDefinitionStages}.
 * @param <ParentT> the return type of the final {@link UpdateDefinitionStages.WithAttach#attach()}
 */
@LangDefinition(ContainerName = "UpdateDefinition", ContainerFileName = "IUpdateDefinition")
interface UpdateDefinition<ParentT> extends
    UpdateDefinitionStages.Blank<ParentT>,
    UpdateDefinitionStages.WithAttach<ParentT>,
    UpdateDefinitionStages.WithProtocol<ParentT>,
    UpdateDefinitionStages.WithFrontend<ParentT>,
    UpdateDefinitionStages.WithFrontendPortRange<ParentT>,
    UpdateDefinitionStages.WithBackendPort<ParentT> {
}
}
| simplifying inbound NAT pool lang annotations
| azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/InboundNatPool.java | simplifying inbound NAT pool lang annotations |
|
Java | mit | ff5b26c8fd533006c2a5a3460afdf07ae1e084b6 | 0 | gscrot/gscrot | package com.redpois0n.guiscrot;
import iconlib.IconUtils;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionListener;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.event.MouseInputListener;
@SuppressWarnings("serial")
public class CoverFrame extends JFrame implements KeyListener, MouseMotionListener, MouseInputListener {

    /** Edge length, in pixels, of the zoomed preview square drawn near the cursor. */
    public static final int PREVIEW_SIZE = 150;
    /** Magnification factor applied to the preview square. */
    public static final float PREVIEW_SCALE = 8F;
    /** Window opacity used when no background screenshot is supplied. */
    public static final float OPACITY = 0.5F;

    /** Screen area this frame covers; rect.x/rect.y translate frame to screen coordinates. */
    private Rectangle rect;
    /** Optional screenshot painted behind the selection overlay (may be null). */
    private Image image;
    // Selection anchor (x, y) and current drag corner (x2, y2), in frame coordinates.
    private int x;
    private int y;
    private int x2;
    private int y2;
    private boolean dragging;
    /** Animation counter driving the "marching ants" selection border. */
    private int seed;
    private RepaintThread thread;

    public CoverFrame(Rectangle rect) {
        this(rect, null);
    }

    /**
     * Creates a borderless frame covering {@code rect} for region selection.
     *
     * @param rect  screen area to cover
     * @param image screenshot to paint behind the overlay, or {@code null} to
     *              make the frame translucent instead
     */
    public CoverFrame(Rectangle rect, Image image) {
        this.rect = rect;
        this.image = image;
        setUndecorated(true);
        setBounds(rect);
        addMouseListener(this);
        addMouseMotionListener(this);
        CoverPanel cp = new CoverPanel();
        cp.addKeyListener(this);
        setContentPane(cp);
        if (image == null) {
            setOpacity(OPACITY);
        }
        thread = new RepaintThread();
        thread.start();
    }

    /** Repaints the frame roughly ten times per second to animate the selection border. */
    private class RepaintThread extends Thread {
        @Override
        public void run() {
            while (!interrupted()) {
                if (seed++ > 20) {
                    seed = 2;
                }
                repaint();
                try {
                    Thread.sleep(100L);
                } catch (InterruptedException ex) {
                    // interrupt() is our shutdown signal. Previously this was caught by a
                    // broad catch(Exception) which swallowed the interrupt (clearing the
                    // flag), so the thread could never be stopped while sleeping.
                    return;
                }
            }
        }
    }

    /** Panel that draws the screenshot, dimming overlay, crosshair, selection box and zoom preview. */
    private class CoverPanel extends JPanel {

        public CoverPanel() {
            setFocusable(true);
        }

        @Override
        public void paintComponent(Graphics g) {
            super.paintComponent(g);
            if (g instanceof Graphics2D) {
                ((Graphics2D) g).setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_GASP);
            }
            // Normalise the drag so (x, y) is the top-left and (x2, y2) the bottom-right corner.
            int x = Math.min(CoverFrame.this.x, CoverFrame.this.x2);
            int y = Math.min(CoverFrame.this.y, CoverFrame.this.y2);
            int x2 = Math.max(CoverFrame.this.x, CoverFrame.this.x2);
            int y2 = Math.max(CoverFrame.this.y, CoverFrame.this.y2);
            // If nothing is selected, default to x and y.
            int tx = x2 == 0 ? x : x2;
            int ty = y2 == 0 ? y : y2;
            if (image != null) {
                // Draw image over frame.
                g.drawImage(image, 0, 0, getWidth(), getHeight(), null);
                // Set color to transparent black.
                g.setColor(new Color(0, 0, 0, 100));
            } else {
                g.setColor(Color.black);
            }
            // Dim every area that is not part of the current selection.
            if (x2 == 0 && y2 == 0) {
                g.fillRect(0, 0, getWidth(), getHeight());
            } else {
                g.fillRect(0, 0, x, getHeight());
                g.fillRect(x, 0, getWidth(), y);
                g.fillRect(tx, y, getWidth(), getHeight());
                g.fillRect(x, ty, tx - x, getHeight());
            }
            Image cursor = IconUtils.getIcon("cursor").getImage();
            g.setFont(new Font("Arial", Font.BOLD, 16));
            RendererUtils.drawOutlinedString("X " + (x + rect.x) + " / Y " + (y + rect.y), x + 2, y - 2, Color.white, Color.black, g);
            // Draw the full-frame crosshair through the active corner.
            g.setColor(Color.white);
            RendererUtils.drawMovingRect(tx, 0, 0, getHeight(), g, seed);
            RendererUtils.drawMovingRect(0, ty, getWidth(), 0, g, seed);
            if (x2 - x != 0 && y2 - y != 0) {
                RendererUtils.drawOutlinedString("Width " + (x2 - x) + " / Height " + (y2 - y), x + 2, y - 4 - g.getFontMetrics().getHeight(), Color.white, Color.black, g);
                g.setColor(Color.white);
                g.drawRect(x, y, tx - x, ty - y);
                RendererUtils.drawMovingRect(x, y, tx - x, ty - y, g, seed);
                // Draw the cursor icon at the active corner.
                g.drawImage(cursor, tx - cursor.getWidth(null) / 2, ty - cursor.getHeight(null) / 2, null);
                BufferedImage preview = new BufferedImage(PREVIEW_SIZE, PREVIEW_SIZE, BufferedImage.TYPE_INT_RGB);
                g.drawRect(x2, y2, PREVIEW_SIZE, PREVIEW_SIZE);
                Graphics2D pg = preview.createGraphics();
                pg.drawImage(image, 0, 0, PREVIEW_SIZE, PREVIEW_SIZE, x2, y2, x2 + PREVIEW_SIZE, y2 + PREVIEW_SIZE, null);
                pg.dispose(); // release the graphics context; it was leaked before
                preview = RendererUtils.scale(preview, BufferedImage.TYPE_INT_RGB, PREVIEW_SIZE, PREVIEW_SIZE, PREVIEW_SCALE);
                g.drawImage(preview, x2 + 50, y2 + 50, PREVIEW_SIZE, PREVIEW_SIZE, null);
            }
        }
    }

    /**
     * Called when enter is pressed or mouse released, if preferred.
     * Stops the repaint thread and disposes the frame.
     */
    public void submit() {
        // Previously the repaint thread kept running after dispose(), leaking it
        // until (if ever) the finalizer ran.
        thread.interrupt();
        setVisible(false);
        dispose();
    }

    /**
     * Last-resort cleanup when the object is garbage collected. finalize() is
     * unreliable, so {@link #submit()} also stops the thread explicitly.
     */
    @Override
    protected void finalize() {
        thread.interrupt();
    }

    @Override
    public void mouseDragged(MouseEvent arg0) {
        dragging = true;
        x2 = arg0.getX();
        y2 = arg0.getY();
        repaint();
    }

    @Override
    public void mouseMoved(MouseEvent arg0) {
        // Track the cursor with a collapsed (zero-size) selection until a drag starts.
        if (!dragging && x == x2 && y == y2) {
            x = arg0.getX();
            y = arg0.getY();
            x2 = arg0.getX();
            y2 = arg0.getY();
        }
        repaint();
    }

    @Override
    public void mouseClicked(MouseEvent arg0) {
    }

    @Override
    public void mouseEntered(MouseEvent arg0) {
    }

    @Override
    public void mouseExited(MouseEvent arg0) {
    }

    @Override
    public void mousePressed(MouseEvent arg0) {
        // Anchor a fresh selection at the press point.
        dragging = true;
        x = arg0.getX();
        y = arg0.getY();
        x2 = arg0.getX();
        y2 = arg0.getY();
        repaint();
    }

    @Override
    public void mouseReleased(MouseEvent arg0) {
        dragging = false;
        repaint();
    }

    @Override
    public void keyPressed(KeyEvent arg0) {
        if (arg0.getKeyCode() == KeyEvent.VK_ENTER) {
            submit();
        } else if (arg0.getKeyCode() == KeyEvent.VK_ESCAPE) {
            // Escape clears the selection (takes effect on the next repaint tick).
            x = 0;
            y = 0;
            x2 = 0;
            y2 = 0;
        }
    }

    @Override
    public void keyReleased(KeyEvent arg0) {
    }

    @Override
    public void keyTyped(KeyEvent arg0) {
    }
}
| src/com/redpois0n/guiscrot/CoverFrame.java | package com.redpois0n.guiscrot;
import iconlib.IconUtils;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Rectangle;
import java.awt.RenderingHints;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionListener;
import java.awt.image.BufferedImage;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.event.MouseInputListener;
@SuppressWarnings("serial")
// Pre-refactor revision of the selection frame (the newer revision above enlarges the
// preview and adds PREVIEW_SCALE). Kept verbatim; review notes inline.
public class CoverFrame extends JFrame implements KeyListener, MouseMotionListener, MouseInputListener {

    // Edge length, in pixels, of the zoomed preview square.
    public static final int PREVIEW_SIZE = 100;
    // Window opacity used when no background screenshot is supplied.
    public static final float OPACITY = 0.5F;

    // Screen area this frame covers; rect.x/rect.y translate frame to screen coordinates.
    private Rectangle rect;
    // Optional screenshot painted behind the overlay (may be null).
    private Image image;
    // Selection anchor (x, y) and current drag corner (x2, y2), in frame coordinates.
    private int x;
    private int y;
    private int x2;
    private int y2;
    private boolean dragging;
    // Animation counter for the moving selection border.
    private int seed;
    private RepaintThread thread;

    public CoverFrame(Rectangle rect) {
        this(rect, null);
    }

    // Creates a borderless frame covering rect; image == null makes the frame translucent.
    public CoverFrame(Rectangle rect, Image image) {
        this.rect = rect;
        this.image = image;
        setUndecorated(true);
        setBounds(rect);
        addMouseListener(this);
        addMouseMotionListener(this);
        CoverPanel cp = new CoverPanel();
        cp.addKeyListener(this);
        setContentPane(cp);
        if (image == null) {
            setOpacity(OPACITY);
        }
        thread = new RepaintThread();
        thread.start();
    }

    // Repaints roughly 10x/second to animate the border.
    // NOTE(review): catch(Exception) below swallows InterruptedException and clears the
    // interrupt flag, so interrupt() cannot reliably stop this thread while it sleeps.
    private class RepaintThread extends Thread {
        @Override
        public void run() {
            while (!interrupted()) {
                if (seed++ > 20) {
                    seed = 2;
                }
                repaint();
                try {
                    Thread.sleep(100L);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        }
    }

    // Panel drawing the screenshot, dimming overlay, crosshair, selection box and preview.
    private class CoverPanel extends JPanel {

        public CoverPanel() {
            setFocusable(true);
        }

        @Override
        public void paintComponent(Graphics g) {
            super.paintComponent(g);
            if (g instanceof Graphics2D) {
                ((Graphics2D) g).setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_GASP);
            }
            // Normalise the drag so (x, y) is top-left and (x2, y2) bottom-right.
            int x = Math.min(CoverFrame.this.x, CoverFrame.this.x2);
            int y = Math.min(CoverFrame.this.y, CoverFrame.this.y2);
            int x2 = Math.max(CoverFrame.this.x, CoverFrame.this.x2);
            int y2 = Math.max(CoverFrame.this.y, CoverFrame.this.y2);
            /** If nothing is selected, default to x and y**/
            int tx = x2 == 0 ? x : x2;
            int ty = y2 == 0 ? y : y2;
            if (image != null) {
                /** Draw image over frame **/
                g.drawImage(image, 0, 0, getWidth(), getHeight(), null);
                /** Set color to transparent black **/
                g.setColor(new Color(0, 0, 0, 100));
            } else {
                g.setColor(Color.black);
            }
            /** Draw black transparent color over all areas that isn't being selected **/
            if (x2 == 0 && y2 == 0) {
                g.fillRect(0, 0, getWidth(), getHeight());
            } else {
                g.fillRect(0, 0, x, getHeight());
                g.fillRect(x, 0, getWidth(), y);
                g.fillRect(tx, y, getWidth(), getHeight());
                g.fillRect(x, ty, tx - x, getHeight());
            }
            Image cursor = IconUtils.getIcon("cursor").getImage();
            g.setFont(new Font("Arial", Font.BOLD, 16));
            RendererUtils.drawOutlinedString("X " + (x + rect.x) + " / Y " + (y + rect.y), x + 2, y - 2, Color.white, Color.black, g);
            /** Draw cross **/
            g.setColor(Color.white);
            RendererUtils.drawMovingRect(tx, 0, 0, getHeight(), g, seed);
            RendererUtils.drawMovingRect(0, ty, getWidth(), 0, g, seed);
            if (x2 - x != 0 && y2 - y != 0) {
                RendererUtils.drawOutlinedString("Width " + (x2 - x) + " / Height " + (y2 - y), x + 2, y - 4 - g.getFontMetrics().getHeight(), Color.white, Color.black, g);
                g.setColor(Color.white);
                g.drawRect(x, y, tx - x, ty - y);
                RendererUtils.drawMovingRect(x, y, tx - x, ty - y, g, seed);
                /** Draw cursor **/
                g.drawImage(cursor, tx - cursor.getWidth(null) / 2, ty - cursor.getHeight(null) / 2, null);
                // Magnified preview centred on the drag corner; drawn at the selection's
                // top-left. NOTE(review): createGraphics() result is never disposed.
                BufferedImage preview = new BufferedImage(PREVIEW_SIZE, PREVIEW_SIZE, BufferedImage.TYPE_INT_RGB);
                preview.createGraphics().drawImage(image, 0, 0, PREVIEW_SIZE, PREVIEW_SIZE, x2 - PREVIEW_SIZE / 2, y2 - PREVIEW_SIZE / 2, x2 + PREVIEW_SIZE / 2, y2 + PREVIEW_SIZE / 2, null);
                g.drawImage(preview, x, y, null);
            }
        }
    }

    /**
     * Called when enter is pressed or mouse released, if preferred.
     * NOTE(review): does not stop the repaint thread; the thread keeps running
     * after dispose() until finalize() runs (which may be never).
     */
    public void submit() {
        setVisible(false);
        dispose();
    }

    /**
     * Closes thread when object is gb'd.
     * NOTE(review): finalize() is an unreliable cleanup hook.
     */
    @Override
    protected void finalize() {
        thread.interrupt();
    }

    @Override
    public void mouseDragged(MouseEvent arg0) {
        dragging = true;
        x2 = arg0.getX();
        y2 = arg0.getY();
        repaint();
    }

    @Override
    public void mouseMoved(MouseEvent arg0) {
        // Track the cursor with a collapsed selection until a drag starts.
        if (!dragging && x == x2 && y == y2) {
            x = arg0.getX();
            y = arg0.getY();
            x2 = arg0.getX();
            y2 = arg0.getY();
        }
        repaint();
    }

    @Override
    public void mouseClicked(MouseEvent arg0) {
    }

    @Override
    public void mouseEntered(MouseEvent arg0) {
    }

    @Override
    public void mouseExited(MouseEvent arg0) {
    }

    @Override
    public void mousePressed(MouseEvent arg0) {
        // Anchor a fresh selection at the press point.
        dragging = true;
        x = arg0.getX();
        y = arg0.getY();
        x2 = arg0.getX();
        y2 = arg0.getY();
        repaint();
    }

    @Override
    public void mouseReleased(MouseEvent arg0) {
        dragging = false;
        repaint();
    }

    @Override
    public void keyPressed(KeyEvent arg0) {
        if (arg0.getKeyCode() == KeyEvent.VK_ENTER) {
            submit();
        } else if (arg0.getKeyCode() == KeyEvent.VK_ESCAPE) {
            // Escape clears the selection; visible on the next repaint tick.
            x = 0;
            y = 0;
            x2 = 0;
            y2 = 0;
        }
    }

    @Override
    public void keyReleased(KeyEvent arg0) {
    }

    @Override
    public void keyTyped(KeyEvent arg0) {
    }
}
| Draws preview dialog, but starts at 0,0, x2, y2
| src/com/redpois0n/guiscrot/CoverFrame.java | Draws preview dialog, but starts at 0,0, x2, y2 |
|
Java | mit | c909164c181f4adc471951f56abebdb681afef0f | 0 | Vovas11/courses,Vovas11/courses,Vovas11/courses | package com.devproserv.courses.dao;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.when;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static com.devproserv.courses.config.MainConfig.SELECT_LOGIN_SQL;
import static com.devproserv.courses.config.MainConfig.INSERT_USER_SQL;
import static com.devproserv.courses.config.MainConfig.INSERT_STUDENT_SQL;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import com.devproserv.courses.controller.AppContext;
import com.devproserv.courses.model.Student;
import com.devproserv.courses.model.User;
/**
 * Contains unit-tests to check functionality of {@link UserDao} class.
 * JDBC objects are mocked so no real database is required.
 *
 * @author vovas11
 *
 */
public class UserDaoTest {

    // dependencies to be mocked
    @Mock
    private AppContext appContext;
    @Mock
    private Connection connection;
    @Mock
    private PreparedStatement prepStmt;   // backs SELECT_LOGIN_SQL
    @Mock
    private PreparedStatement prepStmt1;  // backs INSERT_USER_SQL
    @Mock
    private PreparedStatement prepStmt2;  // backs INSERT_STUDENT_SQL
    @Mock
    private ResultSet resultSet;
    @Mock
    private ResultSet resultSet1;         // generated keys of the user insert
    @Mock
    private User user;
    @Mock
    private Student student;

    /** Object under test, built around the mocked JDBC connection. */
    private UserDao userDao;

    // prepare dependencies
    @Before
    public void setUp() throws Exception {
        MockitoAnnotations.initMocks(this);
        userDao = new UserDao(connection);
        // mocks methods of Connection
        when(connection.prepareStatement(SELECT_LOGIN_SQL)).thenReturn(prepStmt);
        when(connection.prepareStatement(INSERT_USER_SQL, Statement.RETURN_GENERATED_KEYS)).thenReturn(prepStmt1);
        when(connection.prepareStatement(INSERT_STUDENT_SQL)).thenReturn(prepStmt2);
        // mocks methods of PreparedStatement
        doNothing().when(prepStmt).setString(anyInt(), anyString());
        doNothing().when(prepStmt1).setString(anyInt(), anyString());
        doNothing().when(prepStmt2).setString(anyInt(), anyString());
        when(prepStmt.executeQuery()).thenReturn(resultSet);
        when(prepStmt1.executeUpdate()).thenReturn(Integer.valueOf(1));
        when(prepStmt1.getGeneratedKeys()).thenReturn(resultSet1);
    }

    /** Empty result set means the login is free, so loginExists must be false. */
    @Test
    public void testLoginExistsOk() throws SQLException {
        when(resultSet.next()).thenReturn(Boolean.valueOf(false));
        Boolean loginExists = userDao.loginExists("Login");
        assertFalse("Should be false", loginExists);
    }

    /** Non-empty result set means the login is taken, so loginExists must be true. */
    @Test
    public void testLoginExistsNotOk() throws SQLException {
        when(resultSet.next()).thenReturn(Boolean.valueOf(true));
        Boolean loginExists = userDao.loginExists("Login");
        // Fixed: the failure message previously said "Should be false" on an assertTrue.
        assertTrue("Should be true", loginExists);
    }

    /**
     * Placeholder for the createUser() happy path; currently only re-checks
     * loginExists(). TODO continue with student.setId(generatedKey.getInt(1)).
     */
    @Test
    public void testCreateUserOk() throws SQLException {
        when(resultSet1.next()).thenReturn(Boolean.valueOf(false));
        Boolean loginExists = userDao.loginExists("Login");
        assertFalse("Should be false", loginExists);
    }
}
| src/test/java/com/devproserv/courses/dao/UserDaoTest.java | package com.devproserv.courses.dao;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.when;
import static com.devproserv.courses.config.MainConfig.SELECT_LOGIN_SQL;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import com.devproserv.courses.controller.AppContext;
import com.devproserv.courses.model.Student;
import com.devproserv.courses.model.User;
/**
 * Contains unit-tests to check functionality of {@link UserDao} class.
 * JDBC objects are mocked so no real database is required.
 *
 * @author vovas11
 *
 */
public class UserDaoTest {

    // dependencies to be mocked
    @Mock
    private AppContext appContext;
    @Mock
    private Connection connection;
    @Mock
    private PreparedStatement prepStmt;
    @Mock
    private ResultSet resultSet;
    @Mock
    private User user;
    @Mock
    private Student student;

    /** Object under test, built around the mocked JDBC connection. */
    private UserDao userDao;

    // prepare dependencies
    @Before
    public void setUp() throws Exception {
        MockitoAnnotations.initMocks(this);
        userDao = new UserDao(connection);
        // mocks methods of Connection
        when(connection.prepareStatement(SELECT_LOGIN_SQL)).thenReturn(prepStmt);
        // mocks methods of PreparedStatement
        doNothing().when(prepStmt).setString(1, "Login");
        when(prepStmt.executeQuery()).thenReturn(resultSet);
    }

    /** Empty result set means the login is free, so loginExists must be false. */
    @Test
    public void testLoginExistsOk() throws SQLException {
        when(resultSet.next()).thenReturn(Boolean.valueOf(false));
        Boolean loginExists = userDao.loginExists("Login");
        assertFalse("Should be false", loginExists);
    }

    /** Non-empty result set means the login is taken, so loginExists must be true. */
    @Test
    public void testLoginExistsNotOk() throws SQLException {
        when(resultSet.next()).thenReturn(Boolean.valueOf(true));
        Boolean loginExists = userDao.loginExists("Login");
        // Fixed: the failure message previously said "Should be false" on an assertTrue.
        assertTrue("Should be true", loginExists);
    }
}
| Add part of test for createUser() method in UserDao | src/test/java/com/devproserv/courses/dao/UserDaoTest.java | Add part of test for createUser() method in UserDao |
|
Java | mit | d19809a796a1faf9c276d761cb7c86edca1ce1ae | 0 | lwahlmeier/jil | package org.java_lcw.jil;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.PaletteData;
import org.eclipse.swt.graphics.RGB;
import org.java_lcw.jil.scalers.BiCubicScaler;
import org.java_lcw.jil.scalers.BiLinearScaler;
import org.java_lcw.jil.scalers.NearestNeighborScaler;
/**
*
* Main Image object used to construct new Image files.
*
* @author lcw - Luke Wahlmeier
*/
public class Image {
/**
 * MODE_L is grey scaled image (8 bits per pixel, 1 channel)
 */
public static final byte MODE_L = 8;
/**
 * MODE_RGB is an RGB (Red Green Blue) image Each color is its own Channel (24 bits per pixel, 3 channels)
 */
public static final byte MODE_RGB = 24;
/**
 * MODE_RGBA is an RGBA (Red Green Blue Alpha) image Each color and alpha has its own Channel (32 bits per pixel, 4 channels)
 */
public static final byte MODE_RGBA = 32;

// Image width in pixels.
private final int width;
// Image height in pixels.
private final int height;
// Bits per pixel; one of MODE_L, MODE_RGB or MODE_RGBA.
private final byte bpp;
// Number of byte channels per pixel (bpp / 8).
private final byte colors;
// Raw pixel data, row-major, `colors` bytes per pixel.
protected byte[] MAP;

/**
 * Image Types for Image Object to use (open/save)
 *
 * @author lcw - Luke Wahlmeier
 *
 */
public enum ImageType {TIFF, JPEG, PNG};
/**
 * ScaleTypes that can be used when resizing an Image.
 * NN - Nearest Neighbor - Very fast but kind somewhat noticeable scaler (Default).
 * LINER - This is BiLiner, its very fast and descent quality.
 * CUBIC - This is BiCubic, its looks very good in most situations but is a little slower.
 * AWT_NN / AWT_LINER / AWT_CUBIC - presumably the same algorithms delegated to AWT — TODO confirm.
 * (A LANCZOS scaler was documented here previously but is not a member of this enum.)
 * @author lcw - Luke Wahlmeier
 *
 */
public enum ScaleType {NN, LINER, CUBIC, AWT_NN, AWT_LINER, AWT_CUBIC};
// Allocates a zero-filled pixel buffer sized for the given mode and dimensions.
private Image(byte mode, int width, int height) {
    this.width = width;
    this.height = height;
    this.bpp = mode;
    this.colors = (byte) (mode / 8);
    this.MAP = new byte[colors * width * height];
}
// Wraps an existing pixel buffer without copying it.
private Image(byte mode, int width, int height, byte[] map) {
    this.width = width;
    this.height = height;
    this.bpp = mode;
    this.colors = (byte) (mode / 8);
    this.MAP = map;
}
/**
 * Main Method for creating a new Image.
 *
 * @param mode Image mode, uses the static bytes Image.MODE_(L, RGB, RGBA)
 * @param width How wide the image should be in pixels
 * @param height How high the Image should be in pixels
 * @return a newly allocated, zero-filled Image object
 */
public static Image create(byte mode, int width, int height) {
    return new Image(mode, width, height);
}
/**
 * Creating a new Image filled with a default color.
 *
 * @param mode Image mode, uses the static bytes Image.MODE_(L, RGB, RGBA)
 * @param width How wide the image should be in pixels
 * @param height How high the Image should be in pixels
 * @param color default color to fill the image with
 * @return the newly created Image object
 */
public static Image create(byte mode, int width, int height, Color color) {
    Image img = new Image(mode, width, height);
    img.fillColor(color);
    return img;
}
/**
 * Create an Image object from a byte Array. The byte array must be a single array
 * of bytes representing the mode given (L, RGB, or RGBA). The array is wrapped,
 * not copied.
 * @param mode Image mode, uses the static bytes Image.MODE_(L, RGB, RGBA)
 * @param width How wide the image should be in pixels
 * @param height How high the Image should be in pixels
 * @param data byte[] to use to loading the data
 * @return Returns an Image object with the provided byte[] set in it
 * @throws RuntimeException if data.length does not equal (mode/8)*width*height
 *         (note: an unchecked RuntimeException, not ImageException, is thrown here)
 */
public static Image fromByteArray(byte mode, int width, int height, byte[] data) {
    byte cBytes = (byte)(mode/8);
    if(data.length != (width*height*cBytes)){
        throw new RuntimeException("Incorrect number of bytes to make an image of that type");
    }
    Image image = new Image(mode, width, height, data);
    return image;
}
/**
 * Static Method that allows you to open a file, just pass in the path/filename.
 * First tries the decoder matching the file extension; on failure, probes every
 * known decoder in turn.
 * @param filename Filename to attempt to open.
 * @return Returns an Image object from the provided file.
 * @throws ImageException if no decoder could read the file.
 * @throws IOException This happens when we can not access the file.
 */
public static Image open(String filename) throws ImageException, IOException {
    try {
        // Fast path: trust the file extension.
        return open(filename, getImageType(filename));
    } catch (ImageException extensionFailure) {
        // Extension-based detection failed; brute-force every known decoder.
        final ImageType[] candidates = ImageType.values();
        for (int i = 0; i < candidates.length; i++) {
            try {
                return open(filename, candidates[i]);
            } catch (ImageException ignored) {
                // Not this format; fall through to the next candidate.
            }
        }
        throw new ImageException("Could not figure out image type!");
    }
}
/**
 * Static Method that allows you to open a file, just pass in the path/filename.
 * @param filename Filename to attempt to open.
 * @param type Type of file to open used Image.ImageType.(TIFF, PNG, JPEG)
 * @return Returns an Image object from the provided file.
 * @throws ImageException if the type is not a supported format.
 * @throws IOException This happens when we can not access the file.
 */
public static Image open(String filename, ImageType type) throws IOException, ImageException {
    switch(type) {
    case TIFF:
        return TiffFile.open(filename);
    case PNG:
        return PngFile.open(filename);
    case JPEG:
        return JpegFile.open(filename);
    default:
        // Fixed misspelled message ("Could not determen filetype").
        throw new ImageException("Could not determine file type");
    }
}
/**
 * Save the image to the given file. The type is determined from the file
 * extension (required).
 * @param file Location for the file to be written out to
 * @throws IOException This happens if we can not save/open that file
 * @throws ImageException This happens if we can not figure out the type you want use to save as
 */
public void save(File file) throws IOException, ImageException {
    String path = file.getAbsolutePath();
    save(path);
}
/**
 * Save the image to a given location with the provided type.
 * @param file Location for the file to be written out to
 * @param type Type of file to open used Image.ImageType.(TIFF, PNG, JPEG)
 * @throws IOException This happens if we can not save/open that file
 * @throws ImageException This happens if we can not figure out the type you want use to save as
 */
public void save(File file, ImageType type) throws IOException, ImageException {
    String path = file.getAbsolutePath();
    save(path, type);
}
/**
 * Save the image to the given file name. The type is determined from the file
 * extension (required).
 * @param filename Path/Name of the file to save
 * @throws IOException This happens if we can not save/open that file
 * @throws ImageException This happens if we can not figure out the type you want use to save as
 */
public void save(String filename) throws IOException, ImageException {
    save(filename, getImageType(filename));
}
/**
 * Save the image to the given file name.
 * @param filename Path/Name of the file to save
 * @param type Type of file to open used Image.ImageType.(TIFF, PNG, JPEG)
 * @throws IOException This happens if we can not save/open that file
 * @throws ImageException if the type is not a supported format.
 */
public void save(String filename, ImageType type) throws IOException, ImageException{
    switch(type) {
    case TIFF:
        TiffFile.save(filename, this);
        break;
    case PNG:
        PngFile.save(filename, this);
        break;
    case JPEG:
        JpegFile.save(filename, this);
        break;
    default:
        // Fixed misspelled message ("Could not determen file type").
        throw new ImageException("Could not determine file type");
    }
}
/**
 * Create an Image from an AWT BufferedImage.
 * Grey images map to MODE_L, ABGR to MODE_RGBA, BGR to MODE_RGB; every other
 * type falls back to MODE_RGBA.
 * @param BI BufferedImage to use to make the Image object
 * @return the resulting Image object
 */
public static Image fromBufferedImage(BufferedImage BI) {
    final byte[] raw = Utils.bufferedImageToByteArray(BI);
    final int w = BI.getWidth();
    final int h = BI.getHeight();
    switch (BI.getType()) {
    case BufferedImage.TYPE_BYTE_GRAY:
        return Image.fromByteArray(MODE_L, w, h, raw);
    case BufferedImage.TYPE_4BYTE_ABGR:
        return Image.fromByteArray(MODE_RGBA, w, h, raw);
    case BufferedImage.TYPE_3BYTE_BGR:
        // Utils yields RGBA bytes; strip the alpha channel afterwards.
        return Image.fromByteArray(MODE_RGBA, w, h, raw).changeMode(MODE_RGB);
    default:
        return Image.fromByteArray(MODE_RGBA, w, h, raw);
    }
}
/**
 * Take the current Image object and make a BufferedImage out of it.
 * Grey images produce TYPE_BYTE_GRAY; everything else produces TYPE_INT_ARGB
 * (the previous javadoc claimed ARGB in all cases, which was not accurate).
 * @return BufferedImage copy of this image
 */
public BufferedImage toBufferedImage() {
    if (bpp == MODE_L) {
        BufferedImage out = new BufferedImage(getWidth(), getHeight(), BufferedImage.TYPE_BYTE_GRAY);
        // Copy straight into the raster's backing byte array.
        byte[] raster = ((DataBufferByte) out.getRaster().getDataBuffer()).getData();
        System.arraycopy(MAP, 0, raster, 0, raster.length);
        return out;
    }
    BufferedImage out = new BufferedImage(getWidth(), getHeight(), BufferedImage.TYPE_INT_ARGB);
    int[] argb = Utils.bytesToInts(Image.MODE_RGBA, changeMode(Image.MODE_RGBA).getArray());
    out.setRGB(0, 0, getWidth(), getHeight(), argb, 0, getWidth());
    return out;
}
/**
 * This takes ImageData from SWT and makes an Image Object.
 * Direct-palette data keeps its depth; indexed-palette data is expanded to a
 * 32-bit image via the palette's RGB lookup tables.
 * @param data ImageData object to create an Image object from.
 * @return An Image Object based from the ImageData provided.
 * @throws ImageException
 */
public static Image fromImageData(ImageData data) throws ImageException {
    PaletteData palette = data.palette;
    Image newImg;
    if (palette.isDirect) {
        //TODO: need to stop dropping the alpha channel on the floor here!!
        byte bpp = (byte)(data.depth);
        // NOTE(review): "width" actually holds the total pixel count (width*height).
        int width = data.width*data.height;
        int[] newInt = new int[width];
        data.getPixels(0, 0, width, newInt, 0);
        byte[] newBytes = Utils.intsToBytes(newInt, bpp);
        newImg = Image.fromByteArray(bpp, data.width, data.height, newBytes);
    } else {
        // Indexed palette: build per-channel lookup tables, then resolve each pixel.
        byte bpp = (byte)(32);
        newImg = Image.create(bpp, data.width, data.height);
        RGB[] rgbs = palette.getRGBs();
        byte[] red = new byte[rgbs.length];
        byte[] green = new byte[rgbs.length];
        byte[] blue = new byte[rgbs.length];
        for (int i=0; i<rgbs.length; i++) {
            red[i] = (byte)rgbs[i].red;
            green[i] = (byte)rgbs[i].green;
            blue[i] = (byte)rgbs[i].blue;
        }
        for(int y=0; y<data.height; y++) {
            for( int x = 0; x<data.width; x++) {
                int px = data.getPixel(x, y);
                int alpha = data.getAlpha(x, y);
                Color c = new Color(red[px], green[px], blue[px], (byte)alpha);
                newImg.setPixel(x, y, c);
            }
        }
    }
    return newImg;
}
/**
 * Create an SWT ImageData object based from the current Image object.
 * Output is always 24-bit direct RGB; the alpha channel of an RGBA source is
 * carried via ImageData.setAlpha, and grey sources replicate the grey value
 * into all three channels.
 * @return ImageData object
 */
public ImageData toImageData() {
    PaletteData palette = new PaletteData(0xFF0000, 0x00FF00, 0x0000FF);
    ImageData ID = new ImageData(this.getWidth(), this.getHeight(), 24, palette);
    Color c;
    int[] newInt;
    byte[] cBytes = new byte[3];
    for(int y=0; y<this.getHeight(); y++) {
        for(int x=0; x<this.getWidth(); x++) {
            c = this.getPixel(x, y);
            if( this.getColors() == 4) {
                // RGBA: pack RGB into the pixel, alpha separately.
                cBytes[0] = c.getRed();
                cBytes[1] = c.getGreen();
                cBytes[2] = c.getBlue();
                newInt = Utils.bytesToInts(Image.MODE_RGB, cBytes);
                ID.setPixel(x, y, newInt[0]);
                ID.setAlpha(x, y, c.getAlpha() &0xff );
            } else if (this.getColors() == 3){
                cBytes[0] = c.getRed();
                cBytes[1] = c.getGreen();
                cBytes[2] = c.getBlue();
                newInt = Utils.bytesToInts(Image.MODE_RGB, cBytes);
                ID.setPixel(x, y, newInt[0]);
            } else {
                // Grey: replicate the single channel into R, G and B.
                cBytes[0] = c.getGrey();
                cBytes[1] = c.getGrey();
                cBytes[2] = c.getGrey();
                newInt = Utils.bytesToInts(Image.MODE_RGB, cBytes);
                ID.setPixel(x, y, newInt[0]);
            }
        }
    }
    return ID;
}
/**
 * Change the MODE of the current Image. Use the static MODE_ types:
 * MODE_RGBA = 4 byte Image with an alpha channel,
 * MODE_RGB = 3 byte image,
 * MODE_L = 1 byte Image (black and white).
 * Returns this image unchanged when the mode already matches.
 *
 * @param MODE Sets the Image.MODE_ to change to
 * @return a new Image object in that mode (Caution: the current Image object
 *         should be discarded, as changes to it could affect the new one)
 */
public Image changeMode(byte MODE) {
    if (MODE == bpp) {
        return this;
    }
    final Image out = Image.create(MODE, width, height);
    final int pixels = width * height;
    if (MODE == MODE_L) {
        // Collapse each multi-channel pixel to the mean of its R/G/B bytes
        // (alpha, if present, is ignored).
        for (int p = 0; p < pixels; p++) {
            int src = p * colors;
            int sum = (MAP[src] & 0xff) + (MAP[src + 1] & 0xff) + (MAP[src + 2] & 0xff);
            out.MAP[p] = (byte) (sum / 3);
        }
    } else if (MODE == MODE_RGB && bpp == MODE_RGBA) {
        // Drop the alpha byte from every pixel.
        for (int p = 0; p < pixels; p++) {
            System.arraycopy(MAP, p * 4, out.MAP, p * 3, 3);
        }
    } else if (MODE == MODE_RGB && bpp == MODE_L) {
        // Replicate the grey value into all three channels.
        for (int p = 0; p < pixels; p++) {
            byte grey = MAP[p];
            int dst = p * 3;
            out.MAP[dst] = grey;
            out.MAP[dst + 1] = grey;
            out.MAP[dst + 2] = grey;
        }
    } else if (MODE == MODE_RGBA && bpp == MODE_L) {
        // Replicate the grey value and add a fully opaque alpha.
        for (int p = 0; p < pixels; p++) {
            byte grey = MAP[p];
            int dst = p * 4;
            out.MAP[dst] = grey;
            out.MAP[dst + 1] = grey;
            out.MAP[dst + 2] = grey;
            out.MAP[dst + 3] = (byte) 255;
        }
    } else if (MODE == MODE_RGBA && bpp == MODE_RGB) {
        // Copy RGB and append a fully opaque alpha.
        for (int p = 0; p < pixels; p++) {
            System.arraycopy(MAP, p * 3, out.MAP, p * 4, 3);
            out.MAP[p * 4 + 3] = (byte) 255;
        }
    }
    return out;
}
/**
 * This resizes the Image keeping its aspect then adds a border to it if it is not the set width/height
 * @param bWidth new Width
 * @param bHeight new Height
 * @param borderColor Color used to fill the border area around the scaled image
 * @param st ScaleType to use when scaling (see Image.ScaleTypes)
 * @return new Image object of the given size
 */
public Image resizeWithBorders(int bWidth, int bHeight, Color borderColor, ScaleType st) {
    // Start with a canvas filled entirely with the border color, then paste
    // the aspect-preserving resize centered onto it.
    Image ib = Image.create(this.bpp, bWidth, bHeight, borderColor);
    Image newI = resize(bWidth, bHeight, true, st);
    if(newI.getHeight() == ib.getHeight()) {
        // Height matches: borders go on the left/right, center horizontally.
        int pos = (ib.getWidth()/2) - (newI.getWidth()/2);
        ib.paste(pos, 0, newI);
    } else {
        // Width matches: borders go on the top/bottom, center vertically.
        int pos = (ib.getHeight()/2) - (newI.getHeight()/2);
        ib.paste(0, pos, newI);
    }
    return ib;
}
/**
 * Resize this Image keeping its aspect ratio, using the default
 * Nearest Neighbor scaler.
 * @param width new Width
 * @param height new Height
 * @return new Image object of the given size
 */
public Image resize(int width, int height) {
    // Delegate with aspect-preservation enabled; the three-argument
    // overload supplies the default ScaleType.
    return resize(width, height, true);
}
/**
 * Resize this Image using the default Nearest Neighbor scaler.
 * @param width new Width
 * @param height new Height
 * @param keepAspect true to preserve the aspect ratio, false to stretch
 * @return new Image object of the given size
 */
public Image resize(int width, int height, boolean keepAspect) {
    return resize(width, height, keepAspect, ScaleType.NN);
}
/**
 * Resize this Image with an explicit scaler.
 * @param newWidth new Width
 * @param newHeight new Height
 * @param keepAspect true to preserve the aspect ratio (the target size is
 *        shrunk to the largest fitting aspect-correct size), false to stretch
 * @param st ScaleType to use (see Image.ScaleTypes)
 * @return new Image object of the given size
 */
public Image resize(int newWidth, int newHeight, boolean keepAspect, ScaleType st) {
    if (keepAspect) {
        // Shrink the requested size to the largest aspect-correct fit.
        int[] fitted = Utils.getAspectSize(this.width, this.height, newWidth, newHeight);
        newWidth = fitted[0];
        newHeight = fitted[1];
    }
    // Dispatch straight to the chosen scaler; anything unrecognized
    // (including NN) falls through to the Nearest Neighbor default.
    switch (st) {
        case LINER:
            return BiLinearScaler.scale(this, newWidth, newHeight);
        case CUBIC:
            return BiCubicScaler.scale(this, newWidth, newHeight);
        case AWT_NN:
            return Image.fromByteArray(this.bpp, newWidth, newHeight, Utils.awtResizeNN(this, newWidth, newHeight));
        case AWT_LINER:
            return Image.fromByteArray(this.bpp, newWidth, newHeight, Utils.awtResizeLiner(this, newWidth, newHeight));
        case AWT_CUBIC:
            return Image.fromByteArray(this.bpp, newWidth, newHeight, Utils.awtResizeBiCubic(this, newWidth, newHeight));
        default:
            return NearestNeighborScaler.scale(this, newWidth, newHeight);
    }
}
/**
 * Fill current Image with this color.
 * Grey images use the color's grey value; RGB/RGBA images use its
 * red/green/blue (and alpha, when present) channels.
 * @param c Color to fill every pixel with
 */
public void fillColor(Color c) {
    if (this.bpp == 8) {
        Arrays.fill(MAP, c.getGrey());
    } else if (this.bpp >= 24) {
        // Hoisted loop invariants: the alpha test and pixel count do not
        // change per pixel, so they are computed once instead of per iteration.
        final boolean hasAlpha = (this.colors == 4);
        final int pixels = MAP.length / this.colors;
        for (int i = 0; i < pixels; i++) {
            int pos = i * this.colors;
            MAP[pos] = c.getRed();
            MAP[pos+1] = c.getGreen();
            MAP[pos+2] = c.getBlue();
            if (hasAlpha) {
                MAP[pos+3] = c.getAlpha();
            }
        }
    }
}
/**
 * Set a pixel in this image to a given Color
 *
 * @param x X position of the pixel
 * @param y Y position of the pixel
 * @param c Color to set the pixel to (see Image.Color)
 */
public void setPixel(int x, int y, Color c) {
    // Out-of-bounds coordinates are silently ignored (no exception thrown).
    if(x<0 || x>=this.width) {
        return;
    }
    if(y<0 || y>=this.height) {
        return;
    }
    // Byte offset of the pixel: row-major layout, this.colors bytes per pixel.
    int pos = ((y*this.width)+x)*(this.colors);
    if( this.bpp == 8) {
        MAP[pos] = c.getGrey();
    } else if (this.bpp >= 24) {
        MAP[pos] = c.getRed();
        MAP[pos+1] = c.getGreen();
        MAP[pos+2] = c.getBlue();
        if(this.bpp == 32) {
            // Alpha byte only exists for RGBA images.
            MAP[pos+3] = c.getAlpha();
        }
    }
}
/**
 * Merge the given Color into the pixel at (x, y) via Color.merge.
 * NOTE(review): getPixel returns null for out-of-range coordinates, so this
 * throws a NullPointerException when (x, y) is outside the image — confirm
 * callers always pass in-bounds coordinates.
 */
public void mergePixel(int x, int y, Color c) {
    Color cc = this.getPixel(x, y);
    cc.merge(c);
    setPixel(x, y, cc);
}
/**
 * Write one raw byte into a single channel of the pixel at (x, y).
 * @param c channel index (0-based; no bounds checking is performed)
 * @param p raw byte value to store
 */
public void setPixelInChannel(int x, int y, byte c, byte p) {
    int POS = ((y*this.getWidth())+x)*(this.colors)+c;
    MAP[POS] = p;
}
/**
 * Get the Color of a given pixel.
 * @param x X position of the pixel
 * @param y Y position of the pixel
 * @return Color object of that pixel, or null if (x, y) is out of range
 */
public Color getPixel(int x, int y) {
    if(x < 0 || x >= width || y < 0 || y >= height) {
        return null;
    }
    // Byte offset of the pixel: row-major layout, this.colors bytes per pixel.
    int POS = ((y*this.getWidth())+x)*(this.colors);
    if (this.getBPP() == 32) {
        return new Color(MAP[POS], MAP[POS+1], MAP[POS+2], MAP[POS+3]);
    } else if (this.getBPP() == 24) {
        return new Color(MAP[POS], MAP[POS+1], MAP[POS+2]);
    } else {
        return new Color(MAP[POS]);
    }
}
/**
 * Read one raw byte from a single channel of the pixel at (x, y).
 * @param c channel index (0-based; no bounds checking is performed)
 * @return the raw byte stored in that channel
 */
public byte getByteInChannel(int x, int y, byte c) {
    int POS = ((y*this.getWidth())+x)*(this.colors)+c;
    return MAP[POS];
}
/**
 * Paste the given Image object onto this Image
 * If the given Image is taller or wider then this Image we only merge the visible bits onto this Image
 *
 * @param x X position to start the merge
 * @param y Y position to start the merge
 * @param img Image object to merge
 * @throws ImageException
 */
public void paste(int x, int y, Image img) {
    // Convenience overload: straight copy, no alpha masking.
    paste(x, y, img, false);
}
/**
 * Paste the given Image object onto this Image.
 * If the given Image is taller or wider then this Image we only merge the visible bits onto this Image.
 * If alphaMerge == true and the img has an alpha channel we will use that as a mask on how to merge the images.
 * @param x X position to start the merge (may be negative; the overlap is clipped)
 * @param y Y position to start the merge (may be negative; the overlap is clipped)
 * @param img Image object to merge
 * @param alphaMerge should we do a mask type merge on any alpha channel?
 */
public void paste(int x, int y, Image img, boolean alphaMerge){
    // Reject pastes that fall entirely outside this image.
    if (img.height+y < 0 || y >= this.height) {
        return;
    }
    if (img.width+x < 0 || x >= this.width) {
        return;
    }
    // For a plain copy the source must match our pixel layout.
    if (! alphaMerge && img.getBPP() != this.getBPP()) {
        img = img.changeMode(this.getBPP());
    }
    // Clip negative origins: start copying from inside the source instead.
    int imgXOff = 0;
    int imgYOff = 0;
    if(x < 0) {
        imgXOff = Math.abs(x);
        x=0;
    }
    if(y < 0) {
        imgYOff = Math.abs(y);
        y=0;
    }
    int thisLineWidth = this.width * this.colors;   // bytes per destination row
    int imgLineWidth = img.width * img.colors;      // bytes per source row
    int imgXOffBytes = imgXOff * img.colors;        // clipped source-left offset, bytes
    int XBytes = x*this.colors;                     // destination-left offset, bytes
    for(int h=y; h < this.height; h++) {
        if(h-y+imgYOff >= img.height) {
            break;  // ran past the bottom of the source image
        }
        int thisStart = thisLineWidth*h;
        int imgStart = imgLineWidth*(h-y+imgYOff);
        if(! alphaMerge) {
            System.arraycopy(img.MAP, imgStart+(imgXOffBytes), this.MAP, thisStart+(XBytes), Math.min(imgLineWidth-(imgXOffBytes), thisLineWidth-(XBytes)));
        } else {
            // NOTE(review): this path steps w by 4, i.e. it assumes the source
            // has an alpha channel (4 bytes/pixel) — confirm callers only pass
            // RGBA sources when alphaMerge is true.
            int maxWidth = Math.min(imgLineWidth-(imgXOffBytes), thisLineWidth-(XBytes));
            for(int w = 0; w < maxWidth; w+=4) {
                // Mask to unsigned: bytes are signed in Java, so 0xFF reads as -1.
                int srcAlpha = img.MAP[imgStart+w+imgXOffBytes+3] & 0xff;
                if(srcAlpha == 0) {
                    continue;  // fully transparent source pixel: keep destination
                } else if (this.colors == 4 && this.MAP[thisStart+w+XBytes+3] == 0) {
                    // BUGFIX: destination alpha was read without the XBytes offset,
                    // testing the wrong pixel whenever x > 0.
                    System.arraycopy(img.MAP, imgStart+w+imgXOffBytes, this.MAP, thisStart+w+(XBytes), this.colors);
                } else if (srcAlpha == 255) {
                    // BUGFIX: was `img.MAP[imgStart+imgXOffBytes+3] == 255`, which
                    // (a) omitted w — always testing the row's first pixel — and
                    // (b) compared a signed byte to the int 255, so it was never true.
                    System.arraycopy(img.MAP, imgStart+w+imgXOffBytes, this.MAP, thisStart+w+(XBytes), this.colors);
                } else {
                    // Partially transparent: blend via Color.merge.
                    Color c = img.getPixel((w+imgXOffBytes)/img.colors, h-y+imgYOff);
                    Color c2 = this.getPixel((w/this.colors)+x, h);
                    c2.merge(c);
                    this.setPixel((w/this.colors)+x, h, c2);
                }
            }
        }
    }
}
/**
 * @return a deep copy of this Image (the pixel buffer is duplicated).
 */
public Image copy() {
    Image newImage = Image.create(this.bpp, width, height);
    System.arraycopy(MAP, 0, newImage.MAP, 0, MAP.length);
    return newImage;
}
/**
 * Cut a rectangular region out of this Image into a new Image.
 * NOTE(review): no bounds checking — if x+width or y+height exceeds this
 * image's size, System.arraycopy throws ArrayIndexOutOfBoundsException;
 * confirm callers clamp the rectangle first.
 * @param x left edge of the region
 * @param y top edge of the region
 * @param width width of the region in pixels
 * @param height height of the region in pixels
 * @return new Image containing the copied region (same mode as this Image)
 */
public Image cut(int x, int y, int width, int height) {
    Image newImage = Image.create(this.bpp, width, height);
    for(int yy = 0; yy< height; yy++) {
        // Byte offset of row (y+yy) starting at column x in the source.
        int startPos = (((y+yy)*this.width)+x)*(this.colors);
        System.arraycopy(this.MAP, startPos, newImage.MAP, (yy*width*(newImage.colors)), width*(newImage.colors));
    }
    return newImage;
}
/**
 * Sets this Image to random Data
 */
public void mkRandom() {
    Random r = new Random();
    r.nextBytes(MAP);
}
// Replaces the backing pixel buffer directly. NOTE(review): the array length
// is not validated against width*height*colors — confirm callers pass a
// correctly sized buffer.
protected void setArray(byte[] array){
    this.MAP= array;
}
/**
 * This gives the backing byte array for the image. Modifying it will modify the image.
 * @return byte[] of the raw Image data
 */
public byte[] getArray() {
    return MAP;
}
/**
 * Get the number of bitsPerPixel, this is the same as the Image.MODE_ of the Image
 * @return byte (8, 24, 32)
 */
public byte getBPP(){
    return this.bpp;
}
/**
 * Returns the number of color channels in this Image (BPP/8)
 * @return byte (1, 3, or 4)
 */
public byte getColors(){
    return this.colors;
}
/**
 * Returns the width of this Image
 * @return Image Width (int)
 */
public int getWidth(){
    return this.width;
}
/**
 * Returns the height of this Image
 * @return Image Height (int)
 */
public int getHeight(){
    return this.height;
}
//TODO: need to find image type by byte inspection!!
/**
 * Map a file name to an ImageType based on its extension (case-insensitive).
 * Recognized extensions: .tiff/.tif, .png, .jpg/.jpeg.
 * @param filename file name or path to inspect
 * @return the ImageType matching the extension
 * @throws ImageException if the extension is missing or not recognized
 */
private static ImageType getImageType(String filename) throws ImageException {
    // lastIndexOf returns -1 for names without a dot; substring(0) then yields
    // the whole name, which simply fails every comparison below and throws.
    String ext = filename.substring(filename.lastIndexOf('.')+1).toLowerCase();
    if (ext.equals("tiff") || ext.equals("tif")) {
        return ImageType.TIFF;
    } else if (ext.equals("png")) {
        return ImageType.PNG;
    } else if (ext.equals("jpg") || ext.equals("jpeg")){
        return ImageType.JPEG;
    }
    // Fixed typo in the user-facing error message ("determen" -> "determine").
    throw new ImageException("Could not determine file type");
}
/**
 * Checked exception thrown for image errors: unrecognized file types,
 * malformed input data, or decode/encode failures.
 */
public static class ImageException extends Exception {
    private static final long serialVersionUID = 713250734097347352L;
    public ImageException() {
        super();
    }
    public ImageException(String string) {
        super(string);
    }
}
}
| src/main/java/org/java_lcw/jil/Image.java | package org.java_lcw.jil;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferByte;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.PaletteData;
import org.eclipse.swt.graphics.RGB;
import org.java_lcw.jil.scalers.BiCubicScaler;
import org.java_lcw.jil.scalers.BiLinearScaler;
import org.java_lcw.jil.scalers.NearestNeighborScaler;
/**
*
* Main Image object used to construct new Image files.
*
* @author lcw - Luke Wahlmeier
*/
public class Image {
/**
* MODE_L is grey scaled image (8 bits per pixel, 1 channel)
*/
public static final byte MODE_L = 8;
/**
* MODE_RGB is an RGB (Red Green Blue) image Each color is its own Channel (24 bits per pixel, 3 channels)
*/
public static final byte MODE_RGB = 24;
/**
* MODE_RGBA is an RGBA (Red Green Blue Alpha) image Each color and alpha has its own Channel (32 bits per pixel, 4 channels)
*/
public static final byte MODE_RGBA = 32;
private final int width;
private final int height;
private final byte bpp;
private final byte colors;
protected byte[] MAP;
/**
 * Image Types for Image Object to use (open/save)
 *
 * @author lcw - Luke Wahlmeier
 *
 */
public enum ImageType {TIFF, JPEG, PNG};
/**
 * ScaleTypes that can be used when resizing an Image.
 * NN - Nearest Neighbor - Very fast but somewhat noticeable scaler (Default).
 * LINER - This is BiLiner, its very fast and decent quality.
 * CUBIC - This is BiCubic, it looks very good in most situations but is a little slower.
 * AWT_NN / AWT_LINER / AWT_CUBIC - AWT-backed variants of the above three.
 * @author lcw - Luke Wahlmeier
 *
 */
public enum ScaleType {NN, LINER, CUBIC, AWT_NN, AWT_LINER, AWT_CUBIC};
private Image(byte mode, int width, int height) {
colors = (byte) (mode/8);
int size = colors*width*height;
MAP = new byte[size];
this.width = width;
this.height = height;
this.bpp = mode;
}
private Image(byte mode, int width, int height, byte[] map) {
colors = (byte) (mode/8);
MAP = map;
this.width = width;
this.height = height;
this.bpp = mode;
}
/**
* Main Method for creating a new Image
*
* @param mode Image mode, uses the static bytes Image.MODE_(L, RGB, RGBA)
* @param width How wide the image should be in pixels
* @param height How high the Image should be in pixels
* @return Returns an Image object
*/
public static Image create(byte mode, int width, int height) {
return new Image((byte)(mode), width, height);
}
/**
* Creating a new Image with a default color
*
* @param mode Image mode, uses the static bytes Image.MODE_(L, RGB, RGBA)
* @param width How wide the image should be in pixels
* @param height How high the Image should be in pixels
* @param color default color to set for the image
* @return Returns an Image object
*/
public static Image create(byte mode, int width, int height, Color color) {
Image i = new Image((byte)(mode), width, height);
i.fillColor(color);
return i;
}
/**
 * Create an Image object from a byte Array. The byte array must be a single array
 * of bytes representing the mode given (L, RGB, or RGBA)
 * @param mode Image mode, uses the static bytes Image.MODE_(L, RGB, RGBA)
 * @param width How wide the image should be in pixels
 * @param height How high the Image should be in pixels
 * @param data byte[] to use to loading the data
 * @return Returns an Image object with the provided byte[] set in it
 * @throws RuntimeException if data.length != (mode/8)*width*height
 *         (note: the code throws an unchecked RuntimeException here, not ImageException)
 */
public static Image fromByteArray(byte mode, int width, int height, byte[] data) {
    byte cBytes = (byte)(mode/8);
    if(data.length != (width*height*cBytes)){
        throw new RuntimeException("Incorrect number of bytes to make an image of that type");
    }
    // The provided array is used directly (no defensive copy): later mutations
    // of `data` will be visible through the returned Image.
    Image image = new Image(mode, width, height, data);
    return image;
}
/**
 * Static Method that allows you to open a file, just pass in the path/filename.
 * @param filename Filename to attempt to open.
 * @return Returns an Image object from the provided file.
 * @throws ImageException This can happen if we do not know the type of file we where asked to open.
 * @throws IOException This happens when we can not access the file.
 */
public static Image open(String filename) throws ImageException, IOException {
    try {
        // First guess the format from the file extension.
        return open(filename, getImageType(filename));
    } catch(ImageException e) {
        // Extension lookup failed: fall back to brute-forcing every known
        // decoder. Per-type failures are deliberately swallowed — we only
        // give up once no decoder accepts the file.
        for(ImageType t: ImageType.values()) {
            try {
                return open(filename, t);
            } catch(ImageException e1) {
                // ignored: try the next decoder
            }
        }
        throw new ImageException("Could not figure out image type!");
    }
}
/**
* Static Method that allows you to open a file, just pass in the path/filename.
* @param filename Filename to attempt to open.
* @param type Type of file to open used Image.ImageType.(TIFF, PNG, JPEG)
* @return Returns an Image object from the provided file.
* @throws ImageException This can happen if we do not know the type of file we where asked to open.
* @throws IOException This happens when we can not access the file.
*
*/
public static Image open(String filename, ImageType type) throws IOException, ImageException {
Image image;
switch(type) {
case TIFF:
image = TiffFile.open(filename);
break;
case PNG:
image = PngFile.open(filename);
break;
case JPEG:
image = JpegFile.open(filename);
break;
default:
throw new ImageException("Could not determen filetype");
}
return image;
}
/**
* Save the image to the given file name. We determine the type based on the file extension (required)
* @param file Location for the file to be written out to
* @throws IOException This happens if we can not save/open that file
* @throws ImageException This happens if we can not figure out the type you want use to save as
*/
public void save(File file) throws IOException, ImageException {
save(file.getAbsolutePath());
}
/**
* Save the image for a given location with the provided type.
* @param file Location for the file to be written out to
* @param type Type of file to open used Image.ImageType.(TIFF, PNG, JPEG)
* @throws IOException This happens if we can not save/open that file
* @throws ImageException This happens if we can not figure out the type you want use to save as
*/
public void save(File file, ImageType type) throws IOException, ImageException {
save(file.getAbsolutePath(), type);
}
/**
* Save the image to the given file name. We determine the type based on the file extension (required)
* @param filename Path/Name of the file to save
* @throws IOException This happens if we can not save/open that file
* @throws ImageException This happens if we can not figure out the type you want use to save as
*/
public void save(String filename) throws IOException, ImageException {
ImageType type = getImageType(filename);
save(filename, type);
}
/**
* Save the image to the given file name.
* @param filename Path/Name of the file to save
* @param type Type of file to open used Image.ImageType.(TIFF, PNG, JPEG)
* @throws IOException This happens if we can not save/open that file
* @throws ImageException This happens if we can not figure out the type you want use to save as
*/
public void save(String filename, ImageType type) throws IOException, ImageException{
switch(type) {
case TIFF:
TiffFile.save(filename, this);
break;
case PNG:
PngFile.save(filename, this);
break;
case JPEG:
JpegFile.save(filename, this);
break;
default:
throw new ImageException("Could not determen file type");
}
}
/**
* Create an Image from a BufferedImage from AWT - The new Image will always be RGBA type
* @param BI BufferedImage to use to make the Image object
* @return returns an Image object based from the BufferedImage
* @throws ImageException This happens if there is something wrong with the BufferedImage
*/
public static Image fromBufferedImage(BufferedImage BI) {
Image img;
if (BI.getType() == BufferedImage.TYPE_BYTE_GRAY) {
img = Image.fromByteArray(MODE_L, BI.getWidth(), BI.getHeight(), Utils.bufferedImageToByteArray(BI));
} else if(BI.getType() == BufferedImage.TYPE_4BYTE_ABGR) {
img = Image.fromByteArray(MODE_RGBA, BI.getWidth(), BI.getHeight(), Utils.bufferedImageToByteArray(BI));
} else if(BI.getType() == BufferedImage.TYPE_3BYTE_BGR) {
img = Image.fromByteArray(MODE_RGBA, BI.getWidth(), BI.getHeight(), Utils.bufferedImageToByteArray(BI)).changeMode(MODE_RGB);
} else {
img = Image.fromByteArray(MODE_RGBA, BI.getWidth(), BI.getHeight(), Utils.bufferedImageToByteArray(BI));
}
return img;
}
/**
* Take the current Image object and make a BufferedImage out of it. This is always of TYPE_INT_ARGB.
* @return BufferedImage
* @throws ImageException
*/
public BufferedImage toBufferedImage() {
if(this.bpp == 8) {
BufferedImage BB = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_BYTE_GRAY);
byte[] test = ((DataBufferByte) BB.getRaster().getDataBuffer()).getData();
System.arraycopy(MAP, 0, test, 0, test.length);
return BB;
} else {
BufferedImage BB = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_INT_ARGB);
int[] array = Utils.bytesToInts(Image.MODE_RGBA, this.changeMode(Image.MODE_RGBA).getArray());
BB.setRGB(0, 0, this.getWidth(), this.getHeight(), array, 0, this.getWidth());
return BB;
}
}
/**
* This takes ImageData from SWT and makes an Image Object.
* @param data ImageData object to create an Image object from.
* @return An Image Object based from the ImageData provided.
* @throws ImageException
*/
public static Image fromImageData(ImageData data) throws ImageException {
PaletteData palette = data.palette;
Image newImg;
if (palette.isDirect) {
//TODO: need to stop dropping the alpha channel on the floor here!!
byte bpp = (byte)(data.depth);
int width = data.width*data.height;
int[] newInt = new int[width];
data.getPixels(0, 0, width, newInt, 0);
byte[] newBytes = Utils.intsToBytes(newInt, bpp);
newImg = Image.fromByteArray(bpp, data.width, data.height, newBytes);
} else {
byte bpp = (byte)(32);
newImg = Image.create(bpp, data.width, data.height);
RGB[] rgbs = palette.getRGBs();
byte[] red = new byte[rgbs.length];
byte[] green = new byte[rgbs.length];
byte[] blue = new byte[rgbs.length];
for (int i=0; i<rgbs.length; i++) {
red[i] = (byte)rgbs[i].red;
green[i] = (byte)rgbs[i].green;
blue[i] = (byte)rgbs[i].blue;
}
for(int y=0; y<data.height; y++) {
for( int x = 0; x<data.width; x++) {
int px = data.getPixel(x, y);
int alpha = data.getAlpha(x, y);
Color c = new Color(red[px], green[px], blue[px], (byte)alpha);
newImg.setPixel(x, y, c);
}
}
}
return newImg;
}
/**
* Create an SWT ImageData object based from the current Image object
* @return ImageData object
*/
public ImageData toImageData() {
PaletteData palette = new PaletteData(0xFF0000, 0x00FF00, 0x0000FF);
ImageData ID = new ImageData(this.getWidth(), this.getHeight(), 24, palette);
Color c;
int[] newInt;
byte[] cBytes = new byte[3];
for(int y=0; y<this.getHeight(); y++) {
for(int x=0; x<this.getWidth(); x++) {
c = this.getPixel(x, y);
if( this.getColors() == 4) {
cBytes[0] = c.getRed();
cBytes[1] = c.getGreen();
cBytes[2] = c.getBlue();
newInt = Utils.bytesToInts(Image.MODE_RGB, cBytes);
ID.setPixel(x, y, newInt[0]);
ID.setAlpha(x, y, c.getAlpha() &0xff );
} else if (this.getColors() == 3){
cBytes[0] = c.getRed();
cBytes[1] = c.getGreen();
cBytes[2] = c.getBlue();
newInt = Utils.bytesToInts(Image.MODE_RGB, cBytes);
ID.setPixel(x, y, newInt[0]);
} else {
cBytes[0] = c.getGrey();
cBytes[1] = c.getGrey();
cBytes[2] = c.getGrey();
newInt = Utils.bytesToInts(Image.MODE_RGB, cBytes);
ID.setPixel(x, y, newInt[0]);
}
}
}
return ID;
}
/**
* Change the MODE of the current Image. Use the static MODE_ types
* MODE_RGBA = 4 byte Image with an alpha channel
* MODE_RGB = 3 byte image
* MODE_L = 1 byte Image (black and white)
*
* @param MODE Sets the Image.MODE_ to change to
* @return Returns a new Image Object in that mode (Caution current Image object should be discarded
* as changes to it could effect the new Image Object
* @throws ImageException
*/
public Image changeMode(byte MODE) {
if (MODE == this.bpp) {
return this;
}
Image image = Image.create(MODE, width, height);
if (MODE == 8) {
int avg;
for (int x = 0; x < image.MAP.length; x++){
int pos = x*this.colors;
avg = ((MAP[pos]&0xff) + (MAP[pos+1]&0xff) + (MAP[pos+2]&0xff))/3;
image.MAP[x] = (byte) avg;
}
return image;
}
if (MODE == 24 && this.bpp == 32) {
for(int i=0; i< image.MAP.length/3; i++) {
int npos = i*3;
int opos = i*4;
image.MAP[npos] = MAP[opos];
image.MAP[npos+1] = MAP[opos+1];
image.MAP[npos+2] = MAP[opos+2];
}
} else if (MODE == 24 && this.bpp == 8) {
for(int i=0; i<MAP.length; i++) {
int pos = i*3;
image.MAP[pos] = MAP[i];
image.MAP[pos+1] = MAP[i];
image.MAP[pos+2] = MAP[i];
}
} else if (MODE == 32 && this.bpp == 8) {
for(int i=0; i<MAP.length; i++) {
int pos = i*4;
image.MAP[pos] = MAP[i];
image.MAP[pos+1] = MAP[i];
image.MAP[pos+2] = MAP[i];
image.MAP[pos+3] = (byte)255;
}
} else if (MODE == 32 && this.bpp == 24) {
for(int i=0; i<(MAP.length/3); i++) {
int npos = i*4;
int opos = i*3;
image.MAP[npos] = MAP[opos];
image.MAP[npos+1] = MAP[opos+1];
image.MAP[npos+2] = MAP[opos+2];
image.MAP[npos+3] = (byte)255;
}
}
return image;
}
/**
* This resizes the Image keeping its aspect then adds a border to it if it is not the set width/height
* @param bWidth new Width
* @param bHeight new Height
* @param borderColor new Height
* @return new Image object of the given size
*/
public Image resizeWithBorders(int bWidth, int bHeight, Color borderColor, ScaleType st) {
Image ib = Image.create(this.bpp, bWidth, bHeight, borderColor);
Image newI = resize(bWidth, bHeight, true, st);
if(newI.getHeight() == ib.getHeight()) {
int pos = (ib.getWidth()/2) - (newI.getWidth()/2);
ib.paste(pos, 0, newI);
} else {
int pos = (ib.getHeight()/2) - (newI.getHeight()/2);
ib.paste(0, pos, newI);
}
return ib;
}
/**
* This resizes the Image, uses the Nearest Neighbor scaler, and keeps aspect ratio
* @param width new Width
* @param height new Height
* @return new Image object of the given size
*/
public Image resize(int width, int height) {
return resize(width, height, true, ScaleType.NN);
}
/**
* This resizes the Image, uses the Nearest Neighbor scaler, and keeps aspect ratio
* @param width new Width
* @param height new Height
* @param keepAspect boolean, true means keep aspect, false means dont keep the aspect
* @return new Image object of the given size
*/
public Image resize(int width, int height, boolean keepAspect) {
return resize(width, height, keepAspect, ScaleType.NN);
}
/**
* This resizes the Image
* @param newWidth new Width
* @param newHeight new Height
* @param keepAspect boolean, true means keep aspect, false means dont keep the aspect
* @param st ScaleType to use (see Image.ScaleTypes)
* @return new Image object of the given size
*/
public Image resize(int newWidth, int newHeight, boolean keepAspect, ScaleType st) {
if(keepAspect) {
int[] aspect = Utils.getAspectSize(this.width, this.height, newWidth, newHeight);
newWidth = aspect[0];
newHeight = aspect[1];
}
Image tmp;
switch(st) {
case LINER:
tmp = BiLinearScaler.scale(this, newWidth, newHeight);
break;
case CUBIC:
tmp = BiCubicScaler.scale(this, newWidth, newHeight);
break;
case AWT_NN:
tmp = Image.fromByteArray(this.bpp, newWidth, newHeight, Utils.awtResizeNN(this, newWidth, newHeight));
break;
case AWT_LINER:
tmp = Image.fromByteArray(this.bpp, newWidth, newHeight,Utils.awtResizeLiner(this, newWidth, newHeight));
break;
case AWT_CUBIC:
tmp = Image.fromByteArray(this.bpp, newWidth, newHeight,Utils.awtResizeBiCubic(this, newWidth, newHeight));
break;
default:
tmp = NearestNeighborScaler.scale(this, newWidth, newHeight);
}
return tmp;
}
/**
* Fill current Image with this color
* @param c
*/
public void fillColor(Color c) {
if (this.bpp == 8){
Arrays.fill(MAP, c.getGrey());
} else if (this.bpp >= 24){
for(int i=0; i<MAP.length/this.colors; i++) {
int pos = i*this.colors;
MAP[pos] = c.getRed();
MAP[pos+1] = c.getGreen();
MAP[pos+2] = c.getBlue();
if (this.colors == 4){
MAP[pos+3] = c.getAlpha();
}
}
}
}
/**
* Set a pixel in this image to a given Color
*
* @param x X position of the pixel
* @param y Y position of the pixel
* @param c Color to set the pixel to (see Image.Color)
*/
public void setPixel(int x, int y, Color c) {
if(x<0 || x>=this.width) {
return;
}
if(y<0 || y>=this.height) {
return;
}
int pos = ((y*this.width)+x)*(this.colors);
if( this.bpp == 8) {
MAP[pos] = c.getGrey();
} else if (this.bpp >= 24) {
MAP[pos] = c.getRed();
MAP[pos+1] = c.getGreen();
MAP[pos+2] = c.getBlue();
if(this.bpp == 32) {
MAP[pos+3] = c.getAlpha();
}
}
}
public void mergePixel(int x, int y, Color c) {
Color cc = this.getPixel(x, y);
cc.merge(c);
setPixel(x, y, cc);
}
public void setPixelInChannel(int x, int y, byte c, byte p) {
int POS = ((y*this.getWidth())+x)*(this.colors)+c;
MAP[POS] = p;
}
/**
* The a color for a given pixel
* @param x X position of the pixel
* @param y Y position of the pixel
* @return Color object of that pixel
*/
public Color getPixel(int x, int y) {
if(x < 0 || x >= width || y < 0 || y >= height) {
return null;
}
int POS = ((y*this.getWidth())+x)*(this.colors);
if (this.getBPP() == 32) {
return new Color(MAP[POS], MAP[POS+1], MAP[POS+2], MAP[POS+3]);
} else if (this.getBPP() == 24) {
return new Color(MAP[POS], MAP[POS+1], MAP[POS+2]);
} else {
return new Color(MAP[POS]);
}
}
public byte getByteInChannel(int x, int y, byte c) {
int POS = ((y*this.getWidth())+x)*(this.colors)+c;
return MAP[POS];
}
/**
* Paste the given Image object onto this Image
* If the given Image is taller or wider then this Image we only merge the visible bits onto this Image
*
* @param x X position to start the merge
* @param y Y position to start the merge
* @param img Image object to merge
* @throws ImageException
*/
public void paste(int x, int y, Image img) {
paste(x, y, img, false);
}
/**
 * Paste the given Image object onto this Image.
 * If the given Image is taller or wider then this Image we only merge the visible bits onto this Image.
 * If alphaMerge == true and the img has an alpha channel we will use that as a mask on how to merge the images.
 * @param x X position to start the merge (may be negative; the overlap is clipped)
 * @param y Y position to start the merge (may be negative; the overlap is clipped)
 * @param img Image object to merge
 * @param alphaMerge should we do a mask type merge on any alpha channel?
 */
public void paste(int x, int y, Image img, boolean alphaMerge){
    // Reject pastes that fall entirely outside this image.
    if (img.height+y < 0 || y >= this.height) {
        return;
    }
    if (img.width+x < 0 || x >= this.width) {
        return;
    }
    // For a plain copy the source must match our pixel layout.
    if (! alphaMerge && img.getBPP() != this.getBPP()) {
        img = img.changeMode(this.getBPP());
    }
    // Clip negative origins: start copying from inside the source instead.
    int imgXOff = 0;
    int imgYOff = 0;
    if(x < 0) {
        imgXOff = Math.abs(x);
        x=0;
    }
    if(y < 0) {
        imgYOff = Math.abs(y);
        y=0;
    }
    int thisLineWidth = this.width * this.colors;   // bytes per destination row
    int imgLineWidth = img.width * img.colors;      // bytes per source row
    int imgXOffBytes = imgXOff * img.colors;        // clipped source-left offset, bytes
    int XBytes = x*this.colors;                     // destination-left offset, bytes
    for(int h=y; h < this.height; h++) {
        if(h-y+imgYOff >= img.height) {
            break;  // ran past the bottom of the source image
        }
        int thisStart = thisLineWidth*h;
        int imgStart = imgLineWidth*(h-y+imgYOff);
        if(! alphaMerge) {
            System.arraycopy(img.MAP, imgStart+(imgXOffBytes), this.MAP, thisStart+(XBytes), Math.min(imgLineWidth-(imgXOffBytes), thisLineWidth-(XBytes)));
        } else {
            // PERF: hoist the loop bound — Math.min was re-evaluated on every
            // iteration of the inner loop even though it is loop-invariant.
            int maxWidth = Math.min(imgLineWidth-(imgXOffBytes), thisLineWidth-(XBytes));
            // NOTE(review): w steps by 4, i.e. this path assumes the source has
            // an alpha channel (4 bytes/pixel) — confirm callers only pass RGBA
            // sources when alphaMerge is true.
            for(int w = 0; w < maxWidth; w+=4) {
                // Mask to unsigned: bytes are signed in Java, so 0xFF reads as -1.
                int srcAlpha = img.MAP[imgStart+w+imgXOffBytes+3] & 0xff;
                if(srcAlpha == 0) {
                    continue;  // fully transparent source pixel: keep destination
                } else if (this.colors == 4 && this.MAP[thisStart+w+XBytes+3] == 0) {
                    // BUGFIX: destination alpha was read without the XBytes offset,
                    // testing the wrong pixel whenever x > 0.
                    System.arraycopy(img.MAP, imgStart+w+imgXOffBytes, this.MAP, thisStart+w+(XBytes), this.colors);
                } else if (srcAlpha == 255) {
                    // BUGFIX: was `img.MAP[imgStart+imgXOffBytes+3] == 255`, which
                    // (a) omitted w — always testing the row's first pixel — and
                    // (b) compared a signed byte to the int 255, so it was never true.
                    System.arraycopy(img.MAP, imgStart+w+imgXOffBytes, this.MAP, thisStart+w+(XBytes), this.colors);
                } else {
                    // Partially transparent: blend via Color.merge.
                    Color c = img.getPixel((w+imgXOffBytes)/img.colors, h-y+imgYOff);
                    Color c2 = this.getPixel((w/this.colors)+x, h);
                    c2.merge(c);
                    this.setPixel((w/this.colors)+x, h, c2);
                }
            }
        }
    }
}
public Image copy() {
Image newImage = Image.create(this.bpp, width, height);
System.arraycopy(MAP, 0, newImage.MAP, 0, MAP.length);
return newImage;
}
public Image cut(int x, int y, int width, int height) {
Image newImage = Image.create(this.bpp, width, height);
for(int yy = 0; yy< height; yy++) {
int startPos = (((y+yy)*this.width)+x)*(this.colors);
System.arraycopy(this.MAP, startPos, newImage.MAP, (yy*width*(newImage.colors)), width*(newImage.colors));
}
return newImage;
}
/**
* Sets this Image to random Data
*/
public void mkRandom() {
Random r = new Random();
r.nextBytes(MAP);
}
protected void setArray(byte[] array){
this.MAP= array;
}
/**
* This gives the backing byte array for the image. Modifying it will modify the image.
* @return byte[] of the raw Image data
*/
public byte[] getArray() {
return MAP;
}
/**
* Get the number of bitsPerPixel, this is the same as the Image.MODE_ of the Image
* @return byte (8, 24, 32)
*/
public byte getBPP(){
return this.bpp;
}
/**
* Returns the number color channels in this Image (BPP/8)
* @return byte (1, 3, or 4)
*/
public byte getColors(){
return this.colors;
}
/**
* Returns the width of this Image
* @return Image Width (int)
*/
public int getWidth(){
return this.width;
}
/**
* Returns the height of this Image
* @return Image Height (int)
*/
public int getHeight(){
return this.height;
}
//TODO: need to find image type by byte inspection!!
private static ImageType getImageType(String filename) throws ImageException {
String ext = filename.substring(filename.lastIndexOf('.')+1).toLowerCase();
if (ext.equals("tiff") || ext.equals("tif")) {
return ImageType.TIFF;
} else if (ext.equals("png")) {
return ImageType.PNG;
} else if (ext.equals("jpg") || ext.equals("jpeg")){
return ImageType.JPEG;
}
throw new ImageException("Could not determen file type");
}
public static class ImageException extends Exception {
private static final long serialVersionUID = 713250734097347352L;
public ImageException() {
super();
}
public ImageException(String string) {
super(string);
}
}
}
 | optimize so we don't do Math.min every loop
| src/main/java/org/java_lcw/jil/Image.java | optimize so we dont do math.min every loop |
|
Java | mit | 0ba782cda6f2c24071f85df9ee89625e1131c6bf | 0 | Haehnchen/idea-php-symfony2-plugin,gencer/idea-php-symfony2-plugin,issei-m/idea-php-symfony2-plugin,gencer/idea-php-symfony2-plugin,issei-m/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,gencer/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,gencer/idea-php-symfony2-plugin,issei-m/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,Haehnchen/idea-php-symfony2-plugin,issei-m/idea-php-symfony2-plugin | package fr.adrienbrault.idea.symfony2plugin.routing;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.source.xml.XmlDocumentImpl;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.psi.xml.XmlTagValue;
import com.intellij.util.Processor;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.indexing.FileBasedIndexImpl;
import com.jetbrains.php.lang.PhpFileType;
import com.jetbrains.php.lang.documentation.phpdoc.parser.PhpDocElementTypes;
import com.jetbrains.php.lang.documentation.phpdoc.psi.tags.PhpDocTag;
import com.jetbrains.php.lang.psi.PhpFile;
import com.jetbrains.php.lang.psi.elements.*;
import fr.adrienbrault.idea.symfony2plugin.Symfony2Icons;
import fr.adrienbrault.idea.symfony2plugin.Symfony2InterfacesUtil;
import fr.adrienbrault.idea.symfony2plugin.Symfony2ProjectComponent;
import fr.adrienbrault.idea.symfony2plugin.stubs.SymfonyProcessors;
import fr.adrienbrault.idea.symfony2plugin.stubs.indexes.AnnotationRoutesStubIndex;
import fr.adrienbrault.idea.symfony2plugin.stubs.indexes.YamlRoutesStubIndex;
import fr.adrienbrault.idea.symfony2plugin.util.PhpElementsUtil;
import fr.adrienbrault.idea.symfony2plugin.util.PsiElementUtils;
import fr.adrienbrault.idea.symfony2plugin.util.controller.ControllerAction;
import fr.adrienbrault.idea.symfony2plugin.util.controller.ControllerIndex;
import fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.yaml.YAMLFileType;
import org.jetbrains.yaml.psi.YAMLCompoundValue;
import org.jetbrains.yaml.psi.YAMLDocument;
import org.jetbrains.yaml.psi.YAMLFile;
import org.jetbrains.yaml.psi.YAMLKeyValue;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Helper for resolving Symfony routes: route name -> controller method, route
 * parameters, definition targets (YAML / XML / annotation) and completion
 * lookup elements.
 *
 * Changes over the previous version (interface unchanged, new members private):
 *  - {@link #getRoute} uses a single map lookup instead of containsKey()+get(),
 *  - regex patterns are compiled once as constants instead of per call,
 *  - {@link #getRouteDefinitionInsideFile} reuses one collecting processor and
 *    one route-name set for both index queries,
 *  - {@link #getRouteUrl} builds the url with a StringBuilder.
 */
public class RouteHelper {

    // Matches one route entry of a dumped php UrlGenerator:
    // 'route_name' => array(..., '_controller' => 'Controller',...)
    // compiled once — Pattern instances are immutable and thread-safe.
    private static final Pattern DUMPED_ROUTE_PATTERN =
        Pattern.compile("'((?:[^'\\\\]|\\\\.)*)' => [^\\n]+'_controller' => '((?:[^'\\\\]|\\\\.)*)'[^\\n]+\n");

    // Matches name="foo" inside an @Route docblock attribute list.
    private static final Pattern DOC_ROUTE_NAME_PATTERN =
        Pattern.compile("name\\s*=\\s*\"(\\w+)\"");

    /**
     * Lookup elements for every variable ({placeholder}) of the given route.
     *
     * @return lookup elements; empty when the route is unknown
     */
    public static LookupElement[] getRouteParameterLookupElements(Project project, String routeName) {
        Route route = RouteHelper.getRoute(project, routeName);
        if (route == null) {
            return new LookupElement[0];
        }
        List<LookupElement> lookupElements = new ArrayList<LookupElement>();
        for (String variable : route.getVariables()) {
            lookupElements.add(LookupElementBuilder.create(variable).withIcon(Symfony2Icons.ROUTE));
        }
        return lookupElements.toArray(new LookupElement[lookupElements.size()]);
    }

    /**
     * Resolves a route by name from the project component's route map.
     *
     * @return the route or null when unknown
     */
    @Nullable
    public static Route getRoute(Project project, String routeName) {
        Symfony2ProjectComponent symfony2ProjectComponent = project.getComponent(Symfony2ProjectComponent.class);
        // single lookup instead of containsKey() + get(); an absent key yields null either way
        return symfony2ProjectComponent.getRoutes().get(routeName);
    }

    /**
     * Controller method parameters whose name matches the given route parameter,
     * e.g. as navigation targets for "{slug}".
     */
    public static PsiElement[] getRouteParameterPsiElements(Project project, String routeName, String parameterName) {
        List<PsiElement> results = new ArrayList<PsiElement>();
        for (PsiElement psiElement : RouteHelper.getMethods(project, routeName)) {
            if (psiElement instanceof Method) {
                for (Parameter parameter : ((Method) psiElement).getParameters()) {
                    if (parameter.getName().equals(parameterName)) {
                        results.add(parameter);
                    }
                }
            }
        }
        return results.toArray(new PsiElement[results.size()]);
    }

    /**
     * Controller method(s) behind a route name; empty when the route or its
     * controller cannot be resolved.
     */
    public static PsiElement[] getMethods(Project project, String routeName) {
        Route route = getRoute(project, routeName);
        if (route == null) {
            return new PsiElement[0];
        }
        String controllerName = route.getController();
        return getMethodsOnControllerShortcut(project, controllerName);
    }

    /**
     * Resolves a controller shortcut notation to its PSI method(s).
     *
     * Supported notations:
     *   FooBundle\Controller\BarController::fooBarAction  (class::method)
     *   foo_service_bar:fooBar                            (controller as service)
     */
    public static PsiElement[] getMethodsOnControllerShortcut(Project project, String controllerName) {
        if (controllerName == null) {
            return new PsiElement[0];
        }
        // convert to class: FooBundle\Controller\BarController::fooBarAction
        // convert to class: foo_service_bar:fooBar
        if (controllerName.contains("::")) {
            String className = controllerName.substring(0, controllerName.lastIndexOf("::"));
            String methodName = controllerName.substring(controllerName.lastIndexOf("::") + 2);
            return PhpElementsUtil.getPsiElementsBySignature(project, "#M#C\\" + className + "." + methodName);
        } else if (controllerName.contains(":")) {
            ControllerIndex controllerIndex = new ControllerIndex(project);
            ControllerAction controllerServiceAction = controllerIndex.getControllerActionOnService(controllerName);
            if (controllerServiceAction != null) {
                return new PsiElement[] {controllerServiceAction.getMethod()};
            }
        }
        return new PsiElement[0];
    }

    /** Snapshots an Iterable into an ArrayList (PSI APIs return Iterables). */
    private static <E> ArrayList<E> makeCollection(Iterable<E> iter) {
        ArrayList<E> list = new ArrayList<E>();
        for (E item : iter) {
            list.add(item);
        }
        return list;
    }

    /**
     * Extracts routes from a dumped UrlGenerator file, first via the fast regex
     * scan over the raw text, then — for full variable/default/requirement
     * information — via the PHP PSI tree of the declaredRoutes field.
     */
    public static Map<String, Route> getRoutes(Project project, VirtualFile virtualFile) {
        Map<String, Route> routes = new HashMap<String, Route>();
        try {
            routes.putAll(getRoutes(VfsUtil.loadText(virtualFile)));
        } catch (IOException ignored) {
            // best effort: fall through to the PSI based extraction below
        }
        PsiFile psiFile = PsiElementUtils.virtualFileToPsiFile(project, virtualFile);
        if (!(psiFile instanceof PhpFile)) {
            return routes;
        }
        // heavy stuff here, to get nested routing array :)
        // list($variables, $defaults, $requirements, $tokens, $hostTokens)
        Collection<PhpClass> phpClasses = PsiTreeUtil.findChildrenOfType(psiFile, PhpClass.class);
        for (PhpClass phpClass : phpClasses) {
            if (new Symfony2InterfacesUtil().isInstanceOf(phpClass, "\\Symfony\\Component\\Routing\\Generator\\UrlGeneratorInterface")) {
                for (Field field : phpClass.getFields()) {
                    if (field.getName().equals("declaredRoutes")) {
                        PsiElement defaultValue = field.getDefaultValue();
                        if (defaultValue instanceof ArrayCreationExpression) {
                            Iterable<ArrayHashElement> arrayHashElements = ((ArrayCreationExpression) defaultValue).getHashElements();
                            for (ArrayHashElement arrayHashElement : arrayHashElements) {
                                PsiElement hashKey = arrayHashElement.getKey();
                                if (hashKey instanceof StringLiteralExpression) {
                                    String routeName = ((StringLiteralExpression) hashKey).getContents();
                                    if (isProductionRouteName(routeName)) {
                                        routeName = convertLanguageRouteName(routeName);
                                        PsiElement hashValue = arrayHashElement.getValue();
                                        if (hashValue instanceof ArrayCreationExpression) {
                                            routes.put(routeName, convertRouteConfig(routeName, (ArrayCreationExpression) hashValue));
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        return routes;
    }

    /**
     * Converts one dumped route config array — positionally
     * list($variables, $defaults, $requirements, $tokens, $hostTokens) —
     * into a {@link Route}. Missing positions fall back to empty collections.
     */
    private static Route convertRouteConfig(String routeName, ArrayCreationExpression hashValue) {
        List<ArrayHashElement> hashElementCollection = makeCollection(hashValue.getHashElements());

        HashSet<String> variables = new HashSet<String>();
        if (hashElementCollection.size() >= 1 && hashElementCollection.get(0).getValue() instanceof ArrayCreationExpression) {
            variables.addAll(PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) hashElementCollection.get(0).getValue()).values());
        }

        HashMap<String, String> defaults = new HashMap<String, String>();
        if (hashElementCollection.size() >= 2 && hashElementCollection.get(1).getValue() instanceof ArrayCreationExpression) {
            defaults = PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) hashElementCollection.get(1).getValue());
        }

        HashMap<String, String> requirements = new HashMap<String, String>();
        if (hashElementCollection.size() >= 3 && hashElementCollection.get(2).getValue() instanceof ArrayCreationExpression) {
            requirements = PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) hashElementCollection.get(2).getValue());
        }

        ArrayList<Collection<String>> tokens = new ArrayList<Collection<String>>();
        if (hashElementCollection.size() >= 4 && hashElementCollection.get(3).getValue() instanceof ArrayCreationExpression) {
            ArrayCreationExpression tokenArray = (ArrayCreationExpression) hashElementCollection.get(3).getValue();
            if (tokenArray != null) {
                for (ArrayHashElement tokenArrayConfig : tokenArray.getHashElements()) {
                    if (tokenArrayConfig.getValue() instanceof ArrayCreationExpression) {
                        HashMap<String, String> arrayKeyValueMap = PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) tokenArrayConfig.getValue());
                        tokens.add(arrayKeyValueMap.values());
                    }
                }
            }
        }

        // hostTokens = 4 need them?
        return new Route(routeName, variables, defaults, requirements, tokens);
    }

    /** Filters out assetic dev routes like _assetic_04d92f8 / _assetic_04d92f8_0. */
    private static boolean isProductionRouteName(String routeName) {
        return !routeName.matches("_assetic_[0-9a-z]+[_\\d+]*");
    }

    /**
     * support I18nRoutingBundle: strips the "en__RG__" style locale prefix.
     */
    private static String convertLanguageRouteName(String routeName) {
        if (routeName.matches("^[a-z]{2}__RG__.*$")) {
            routeName = routeName.replaceAll("^[a-z]{2}+__RG__", "");
        }
        return routeName;
    }

    /**
     * Extracts route name / controller pairs from the raw text of a dumped php
     * UrlGenerator via regex.
     */
    public static Map<String, Route> getRoutes(String routing) {
        Map<String, Route> routes = new HashMap<String, Route>();
        Matcher matcher = DUMPED_ROUTE_PATTERN.matcher(routing);
        while (matcher.find()) {
            String routeName = matcher.group(1);
            // dont add _assetic_04d92f8, _assetic_04d92f8_0
            if (!isProductionRouteName(routeName)) {
                continue;
            }
            routeName = convertLanguageRouteName(routeName);
            String controller = matcher.group(2).replace("\\\\", "\\");
            Route route = new Route(routeName, controller);
            routes.put(route.getName(), route);
        }
        return routes;
    }

    /**
     * Builds the "Foo\Bar::methodAction" controller notation for a method.
     *
     * @return controller notation or null when class/FQN cannot be resolved
     */
    @Nullable
    public static String convertMethodToRouteControllerName(Method method) {
        PhpClass phpClass = method.getContainingClass();
        if (phpClass == null) {
            return null;
        }
        String className = phpClass.getPresentableFQN();
        if (className == null) {
            return null;
        }
        return (className.startsWith("\\") ? className.substring(1) : className) + "::" + method.getName();
    }

    /**
     * Files (yaml/xml route files and annotated php controllers) that define any
     * of the given route names, resolved through the stub indexes.
     */
    public static VirtualFile[] getRouteDefinitionInsideFile(Project project, String... routeNames) {
        final List<VirtualFile> virtualFiles = new ArrayList<VirtualFile>();
        // both queries use the same name set and the same collecting processor
        Set<String> routeNameSet = new HashSet<String>(Arrays.asList(routeNames));
        Processor<VirtualFile> collector = new Processor<VirtualFile>() {
            @Override
            public boolean process(VirtualFile virtualFile) {
                virtualFiles.add(virtualFile);
                return true;
            }
        };
        FileBasedIndexImpl.getInstance().getFilesWithKey(YamlRoutesStubIndex.KEY, routeNameSet, collector, GlobalSearchScope.getScopeRestrictedByFileTypes(GlobalSearchScope.allScope(project), YAMLFileType.YML, XmlFileType.INSTANCE));
        FileBasedIndexImpl.getInstance().getFilesWithKey(AnnotationRoutesStubIndex.KEY, routeNameSet, collector, GlobalSearchScope.getScopeRestrictedByFileTypes(GlobalSearchScope.allScope(project), PhpFileType.INSTANCE));
        return virtualFiles.toArray(new VirtualFile[virtualFiles.size()]);
    }

    /**
     * Route names defined in a yaml document: every top level key whose value
     * has a "path"/"pattern" key together with a "defaults" key.
     *
     * @return names, or null when the document has no top level keys
     *         (kept for backward compatibility — callers distinguish the cases)
     */
    @Nullable
    public static Set<String> getYamlRouteNames(YAMLDocument yamlDocument) {
        Set<String> set = new HashSet<String>();
        // get services or parameter key
        YAMLKeyValue[] yamlKeys = PsiTreeUtil.getChildrenOfType(yamlDocument, YAMLKeyValue.class);
        if (yamlKeys == null) {
            return null;
        }
        for (YAMLKeyValue yamlKeyValue : yamlKeys) {
            PsiElement element = yamlKeyValue.getValue();
            if (element instanceof YAMLCompoundValue) {
                Set<String> keySet = YamlHelper.getYamlCompoundValueKeyNames((YAMLCompoundValue) element);
                if ((keySet.contains("path") || keySet.contains("pattern")) && keySet.contains("defaults")) {
                    set.add(yamlKeyValue.getKeyText());
                }
            }
        }
        return set;
    }

    /**
     * Route names (route/@id) defined in an xml routing file.
     */
    public static Set<String> getXmlRouteNames(XmlFile psiFile) {
        Set<String> set = new HashSet<String>();
        XmlDocumentImpl document = PsiTreeUtil.getChildOfType(psiFile, XmlDocumentImpl.class);
        if (document == null) {
            return set;
        }
        /**
         * <routes>
         *   <route id="foo" path="/blog/{slug}">
         *     <default key="_controller">Foo</default>
         *   </route>
         * </routes>
         */
        for (XmlTag xmlTag : PsiTreeUtil.getChildrenOfTypeAsList(psiFile.getFirstChild(), XmlTag.class)) {
            if (xmlTag.getName().equals("routes")) {
                for (XmlTag servicesTag : xmlTag.getSubTags()) {
                    if (servicesTag.getName().equals("route")) {
                        XmlAttribute xmlAttribute = servicesTag.getAttribute("id");
                        if (xmlAttribute != null) {
                            String attrValue = xmlAttribute.getValue();
                            if (StringUtils.isNotBlank(attrValue)) {
                                set.add(attrValue);
                            }
                        }
                    }
                }
            }
        }
        return set;
    }

    /**
     * The route/@id attribute in an xml routing file matching the given route
     * name, usable as a navigation target.
     */
    @Nullable
    public static PsiElement getXmlRouteNameTarget(@NotNull XmlFile psiFile, @NotNull String routeName) {
        XmlDocumentImpl document = PsiTreeUtil.getChildOfType(psiFile, XmlDocumentImpl.class);
        if (document == null) {
            return null;
        }
        for (XmlTag xmlTag : PsiTreeUtil.getChildrenOfTypeAsList(psiFile.getFirstChild(), XmlTag.class)) {
            if (xmlTag.getName().equals("routes")) {
                for (XmlTag routeTag : xmlTag.getSubTags()) {
                    if (routeTag.getName().equals("route")) {
                        XmlAttribute xmlAttribute = routeTag.getAttribute("id");
                        if (xmlAttribute != null) {
                            String attrValue = xmlAttribute.getValue();
                            if (routeName.equals(attrValue)) {
                                return xmlAttribute;
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    /**
     * All known routes that point at the given controller action method.
     *
     * @return matching routes, or null when the method's controller notation
     *         cannot be built
     */
    @Nullable
    public static List<Route> getRoutesOnControllerAction(Method method) {
        String methodRouteActionName = RouteHelper.convertMethodToRouteControllerName(method);
        if (methodRouteActionName == null) {
            return null;
        }
        List<Route> routes = new ArrayList<Route>();
        Symfony2ProjectComponent symfony2ProjectComponent = method.getProject().getComponent(Symfony2ProjectComponent.class);
        for (Map.Entry<String, Route> routeEntry : symfony2ProjectComponent.getRoutes().entrySet()) {
            if (routeEntry.getValue().getController() != null && routeEntry.getValue().getController().equals(methodRouteActionName)) {
                routes.add(routeEntry.getValue());
            }
        }
        return routes;
    }

    /**
     * Navigation target of a route definition: the yaml key, the xml id
     * attribute or the @Route annotation attribute list carrying the name.
     */
    @Nullable
    public static PsiElement getRouteNameTarget(Project project, String routeName) {
        VirtualFile[] virtualFiles = RouteHelper.getRouteDefinitionInsideFile(project, routeName);
        for (VirtualFile virtualFile : virtualFiles) {
            PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile);
            if (psiFile instanceof YAMLFile) {
                YAMLKeyValue yamlKeyValue = YamlHelper.getRootKey(psiFile, routeName);
                if (yamlKeyValue != null) {
                    return yamlKeyValue;
                }
            }
            if (psiFile instanceof XmlFile) {
                PsiElement target = RouteHelper.getXmlRouteNameTarget((XmlFile) psiFile, routeName);
                if (target != null) {
                    return target;
                }
            }
            if (psiFile instanceof PhpFile) {
                Collection<PhpDocTag> phpDocTagList = PsiTreeUtil.findChildrenOfType(psiFile, PhpDocTag.class);
                for (PhpDocTag phpDocTag : phpDocTagList) {
                    String annotationFqnName = AnnotationRoutesStubIndex.getClassNameReference(phpDocTag);
                    if ("\\Sensio\\Bundle\\FrameworkExtraBundle\\Configuration\\Route".equals(annotationFqnName)) {
                        PsiElement phpDocAttributeList = PsiElementUtils.getChildrenOfType(phpDocTag, PlatformPatterns.psiElement(PhpDocElementTypes.phpDocAttributeList));
                        if (phpDocAttributeList != null) {
                            // @TODO: use pattern (walk the docblock psi instead of regex matching its text)
                            Matcher matcher = DOC_ROUTE_NAME_PATTERN.matcher(phpDocAttributeList.getText());
                            if (matcher.find() && matcher.group(1).equals(routeName)) {
                                return phpDocAttributeList;
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    /**
     * Reassembles a route url from its dumped token list.
     *
     * @return url like "/blog/{slug}", or null when no token contributed
     */
    @Nullable
    public static String getRouteUrl(List<Collection<String>> routeTokens) {
        // tokens are stored in reverse order; iterate a reversed copy so the
        // caller's list is not modified
        List<Collection<String>> tokens = new ArrayList<Collection<String>>(routeTokens);
        Collections.reverse(tokens);
        StringBuilder url = new StringBuilder();
        for (Collection<String> token : tokens) {
            // copy, we are not allowed to mod the token collection
            List<String> list = new ArrayList<String>(token);
            if (list.size() >= 2 && list.get(1).equals("text")) {
                url.append(list.get(0));
            }
            if (list.size() >= 4 && list.get(3).equals("variable")) {
                url.append(list.get(2)).append("{").append(list.get(0)).append("}");
            }
        }
        return url.length() == 0 ? null : url.toString();
    }

    /**
     * Lookup elements for all known route names: compiled routes first, then —
     * deduplicated against them — names found by the yaml and annotation stub
     * indexes (rendered as "weak" elements).
     */
    public static List<LookupElement> getRoutesLookupElements(final @NotNull Project project) {
        Symfony2ProjectComponent symfony2ProjectComponent = project.getComponent(Symfony2ProjectComponent.class);
        Map<String, Route> routes = symfony2ProjectComponent.getRoutes();

        final List<LookupElement> lookupElements = new ArrayList<LookupElement>();
        final Set<String> uniqueSet = new HashSet<String>();
        for (Route route : routes.values()) {
            lookupElements.add(new RouteLookupElement(route));
            uniqueSet.add(route.getName());
        }

        SymfonyProcessors.CollectProjectUniqueKeysStrong ymlProjectProcessor = new SymfonyProcessors.CollectProjectUniqueKeysStrong(project, YamlRoutesStubIndex.KEY, uniqueSet);
        FileBasedIndex.getInstance().processAllKeys(YamlRoutesStubIndex.KEY, ymlProjectProcessor, project);
        for (String s : ymlProjectProcessor.getResult()) {
            lookupElements.add(new RouteLookupElement(new Route(s, null), true));
            uniqueSet.add(s);
        }

        SymfonyProcessors.CollectProjectUniqueKeysStrong annotationProjectProcessor = new SymfonyProcessors.CollectProjectUniqueKeysStrong(project, AnnotationRoutesStubIndex.KEY, uniqueSet);
        FileBasedIndex.getInstance().processAllKeys(AnnotationRoutesStubIndex.KEY, annotationProjectProcessor, project);
        for (String s : annotationProjectProcessor.getResult()) {
            lookupElements.add(new RouteLookupElement(new Route(s, null), true));
            uniqueSet.add(s);
        }

        return lookupElements;
    }

    /**
     * All navigation targets for a route name: the controller method(s) plus
     * the defining yaml/xml/annotation element, when resolvable.
     */
    public static List<PsiElement> getRouteDefinitionTargets(Project project, String routeName) {
        List<PsiElement> targets = new ArrayList<PsiElement>();
        Collections.addAll(targets, RouteHelper.getMethods(project, routeName));
        PsiElement definitionTarget = RouteHelper.getRouteNameTarget(project, routeName);
        if (definitionTarget != null) {
            targets.add(definitionTarget);
        }
        return targets;
    }
}
| src/fr/adrienbrault/idea/symfony2plugin/routing/RouteHelper.java | package fr.adrienbrault.idea.symfony2plugin.routing;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.patterns.PlatformPatterns;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.impl.source.xml.XmlDocumentImpl;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.psi.xml.XmlTagValue;
import com.intellij.util.Processor;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.indexing.FileBasedIndexImpl;
import com.jetbrains.php.lang.PhpFileType;
import com.jetbrains.php.lang.documentation.phpdoc.parser.PhpDocElementTypes;
import com.jetbrains.php.lang.documentation.phpdoc.psi.tags.PhpDocTag;
import com.jetbrains.php.lang.psi.PhpFile;
import com.jetbrains.php.lang.psi.elements.*;
import fr.adrienbrault.idea.symfony2plugin.Symfony2Icons;
import fr.adrienbrault.idea.symfony2plugin.Symfony2InterfacesUtil;
import fr.adrienbrault.idea.symfony2plugin.Symfony2ProjectComponent;
import fr.adrienbrault.idea.symfony2plugin.stubs.SymfonyProcessors;
import fr.adrienbrault.idea.symfony2plugin.stubs.indexes.AnnotationRoutesStubIndex;
import fr.adrienbrault.idea.symfony2plugin.stubs.indexes.YamlRoutesStubIndex;
import fr.adrienbrault.idea.symfony2plugin.util.PhpElementsUtil;
import fr.adrienbrault.idea.symfony2plugin.util.PsiElementUtils;
import fr.adrienbrault.idea.symfony2plugin.util.controller.ControllerAction;
import fr.adrienbrault.idea.symfony2plugin.util.controller.ControllerIndex;
import fr.adrienbrault.idea.symfony2plugin.util.yaml.YamlHelper;
import org.apache.commons.lang.StringUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.yaml.YAMLFileType;
import org.jetbrains.yaml.psi.YAMLCompoundValue;
import org.jetbrains.yaml.psi.YAMLDocument;
import org.jetbrains.yaml.psi.YAMLFile;
import org.jetbrains.yaml.psi.YAMLKeyValue;
import java.io.IOException;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class RouteHelper {
public static LookupElement[] getRouteParameterLookupElements(Project project, String routeName) {
List<LookupElement> lookupElements = new ArrayList<LookupElement>();
Route route = RouteHelper.getRoute(project, routeName);
if(route == null) {
return lookupElements.toArray(new LookupElement[lookupElements.size()]);
}
for(String values: route.getVariables()) {
lookupElements.add(LookupElementBuilder.create(values).withIcon(Symfony2Icons.ROUTE));
}
return lookupElements.toArray(new LookupElement[lookupElements.size()]);
}
@Nullable
public static Route getRoute(Project project, String routeName) {
Symfony2ProjectComponent symfony2ProjectComponent = project.getComponent(Symfony2ProjectComponent.class);
if(!symfony2ProjectComponent.getRoutes().containsKey(routeName)) {
return null;
}
return symfony2ProjectComponent.getRoutes().get(routeName);
}
public static PsiElement[] getRouteParameterPsiElements(Project project, String routeName, String parameterName) {
List<PsiElement> results = new ArrayList<PsiElement>();
for (PsiElement psiElement : RouteHelper.getMethods(project, routeName)) {
if(psiElement instanceof Method) {
for(Parameter parameter: ((Method) psiElement).getParameters()) {
if(parameter.getName().equals(parameterName)) {
results.add(parameter);
}
}
}
}
return results.toArray(new PsiElement[results.size()]);
}
public static PsiElement[] getMethods(Project project, String routeName) {
Route route = getRoute(project, routeName);
if(route == null) {
return new PsiElement[0];
}
String controllerName = route.getController();
return getMethodsOnControllerShortcut(project, controllerName);
}
public static PsiElement[] getMethodsOnControllerShortcut(Project project, String controllerName) {
if(controllerName == null) {
return new PsiElement[0];
}
// convert to class: FooBundle\Controller\BarController::fooBarAction
// convert to class: foo_service_bar:fooBar
if(controllerName.contains("::")) {
String className = controllerName.substring(0, controllerName.lastIndexOf("::"));
String methodName = controllerName.substring(controllerName.lastIndexOf("::") + 2);
return PhpElementsUtil.getPsiElementsBySignature(project, "#M#C\\" + className + "." + methodName);
} else if(controllerName.contains(":")) {
ControllerIndex controllerIndex = new ControllerIndex(project);
ControllerAction controllerServiceAction = controllerIndex.getControllerActionOnService(controllerName);
if(controllerServiceAction != null) {
return new PsiElement[] {controllerServiceAction.getMethod()};
}
}
return new PsiElement[0];
}
private static <E> ArrayList<E> makeCollection(Iterable<E> iter) {
ArrayList<E> list = new ArrayList<E>();
for (E item : iter) {
list.add(item);
}
return list;
}
public static Map<String, Route> getRoutes(Project project, VirtualFile virtualFile) {
Map<String, Route> routes = new HashMap<String, Route>();
try {
routes.putAll(getRoutes(VfsUtil.loadText(virtualFile)));
} catch (IOException ignored) {
}
PsiFile psiFile = PsiElementUtils.virtualFileToPsiFile(project, virtualFile);
if(!(psiFile instanceof PhpFile)) {
return routes;
}
// heavy stuff here, to get nested routing array :)
// list($variables, $defaults, $requirements, $tokens, $hostTokens)
Collection<PhpClass> phpClasses = PsiTreeUtil.findChildrenOfType(psiFile, PhpClass.class);
for(PhpClass phpClass: phpClasses) {
if(new Symfony2InterfacesUtil().isInstanceOf(phpClass, "\\Symfony\\Component\\Routing\\Generator\\UrlGeneratorInterface")) {
for(Field field: phpClass.getFields()) {
if(field.getName().equals("declaredRoutes")) {
PsiElement defaultValue = field.getDefaultValue();
if(defaultValue instanceof ArrayCreationExpression) {
Iterable<ArrayHashElement> arrayHashElements = ((ArrayCreationExpression) defaultValue).getHashElements();
for(ArrayHashElement arrayHashElement: arrayHashElements) {
PsiElement hashKey = arrayHashElement.getKey();
if(hashKey instanceof StringLiteralExpression) {
String routeName = ((StringLiteralExpression) hashKey).getContents();
if(isProductionRouteName(routeName)) {
routeName = convertLanguageRouteName(routeName);
PsiElement hashValue = arrayHashElement.getValue();
if(hashValue instanceof ArrayCreationExpression) {
routes.put(routeName, convertRouteConfig(routeName, (ArrayCreationExpression) hashValue));
}
}
}
}
}
}
}
}
}
return routes;
}
private static Route convertRouteConfig(String routeName, ArrayCreationExpression hashValue) {
List<ArrayHashElement> hashElementCollection = makeCollection(hashValue.getHashElements());
HashSet<String> variables = new HashSet<String>();
if(hashElementCollection.size() >= 1 && hashElementCollection.get(0).getValue() instanceof ArrayCreationExpression) {
variables.addAll(PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) hashElementCollection.get(0).getValue()).values());
}
HashMap<String, String> defaults = new HashMap<String, String>();
if(hashElementCollection.size() >= 2 && hashElementCollection.get(1).getValue() instanceof ArrayCreationExpression) {
defaults = PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) hashElementCollection.get(1).getValue());
}
HashMap<String, String>requirements = new HashMap<String, String>();
if(hashElementCollection.size() >= 3 && hashElementCollection.get(2).getValue() instanceof ArrayCreationExpression) {
requirements = PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) hashElementCollection.get(2).getValue());
}
ArrayList<Collection<String>> tokens = new ArrayList<Collection<String>>();
if(hashElementCollection.size() >= 4 && hashElementCollection.get(3).getValue() instanceof ArrayCreationExpression) {
ArrayCreationExpression tokenArray = (ArrayCreationExpression) hashElementCollection.get(3).getValue();
if(tokenArray != null) {
for(ArrayHashElement tokenArrayConfig: tokenArray.getHashElements()) {
if(tokenArrayConfig.getValue() instanceof ArrayCreationExpression) {
HashMap<String, String> arrayKeyValueMap = PhpElementsUtil.getArrayKeyValueMap((ArrayCreationExpression) tokenArrayConfig.getValue());
tokens.add(arrayKeyValueMap.values());
}
}
}
}
// hostTokens = 4 need them?
return new Route(routeName, variables, defaults, requirements, tokens);
}
private static boolean isProductionRouteName(String routeName) {
return !routeName.matches("_assetic_[0-9a-z]+[_\\d+]*");
}
/**
* support I18nRoutingBundle
*/
private static String convertLanguageRouteName(String routeName) {
if(routeName.matches("^[a-z]{2}__RG__.*$")) {
routeName = routeName.replaceAll("^[a-z]{2}+__RG__", "");
}
return routeName;
}
public static Map<String, Route> getRoutes(String routing) {
Map<String, Route> routes = new HashMap<String, Route>();
Matcher matcher = Pattern.compile("'((?:[^'\\\\]|\\\\.)*)' => [^\\n]+'_controller' => '((?:[^'\\\\]|\\\\.)*)'[^\\n]+\n").matcher(routing);
while (matcher.find()) {
String routeName = matcher.group(1);
// dont add _assetic_04d92f8, _assetic_04d92f8_0
if(!isProductionRouteName(routeName)) {
continue;
}
routeName = convertLanguageRouteName(routeName);
String controller = matcher.group(2).replace("\\\\", "\\");
Route route = new Route(routeName, controller);
routes.put(route.getName(), route);
}
return routes;
}
/**
* Foo\Bar::methodAction
*/
@Nullable
public static String convertMethodToRouteControllerName(Method method) {
PhpClass phpClass = method.getContainingClass();
if(phpClass == null) {
return null;
}
String className = phpClass.getPresentableFQN();
if(className == null) {
return null;
}
return (className.startsWith("\\") ? className.substring(1) : className) + "::" + method.getName();
}
public static VirtualFile[] getRouteDefinitionInsideFile(Project project, String... routeNames) {
final List<VirtualFile> virtualFiles = new ArrayList<VirtualFile> ();
FileBasedIndexImpl.getInstance().getFilesWithKey(YamlRoutesStubIndex.KEY, new HashSet<String>(Arrays.asList(routeNames)), new Processor<VirtualFile>() {
@Override
public boolean process(VirtualFile virtualFile) {
virtualFiles.add(virtualFile);
return true;
}
}, GlobalSearchScope.getScopeRestrictedByFileTypes(GlobalSearchScope.allScope(project), YAMLFileType.YML));
FileBasedIndexImpl.getInstance().getFilesWithKey(AnnotationRoutesStubIndex.KEY, new HashSet<String>(Arrays.asList(routeNames)), new Processor<VirtualFile>() {
@Override
public boolean process(VirtualFile virtualFile) {
virtualFiles.add(virtualFile);
return true;
}
}, GlobalSearchScope.getScopeRestrictedByFileTypes(GlobalSearchScope.allScope(project), PhpFileType.INSTANCE));
return virtualFiles.toArray(new VirtualFile[virtualFiles.size()]);
}
@Nullable
public static Set<String> getYamlRouteNames(YAMLDocument yamlDocument) {
Set<String> set = new HashSet<String>();
// get services or parameter key
YAMLKeyValue[] yamlKeys = PsiTreeUtil.getChildrenOfType(yamlDocument, YAMLKeyValue.class);
if(yamlKeys == null) {
return null;
}
for(YAMLKeyValue yamlKeyValue : yamlKeys) {
PsiElement element = yamlKeyValue.getValue();
if(element instanceof YAMLCompoundValue) {
Set<String> keySet = YamlHelper.getYamlCompoundValueKeyNames((YAMLCompoundValue) element);
if((keySet.contains("path") || keySet.contains("pattern")) && keySet.contains("defaults")) {
set.add(yamlKeyValue.getKeyText());
}
}
}
return set;
}
public static Set<String> getXmlRouteNames(XmlFile psiFile) {
Set<String> set = new HashSet<String>();
XmlDocumentImpl document = PsiTreeUtil.getChildOfType(psiFile, XmlDocumentImpl.class);
if(document == null) {
return set;
}
/**
* <routes>
* <route id="foo" path="/blog/{slug}">
* <default key="_controller">Foo</default>
* </route>
* </routes>
*/
for(XmlTag xmlTag: PsiTreeUtil.getChildrenOfTypeAsList(psiFile.getFirstChild(), XmlTag.class)) {
if(xmlTag.getName().equals("routes")) {
for(XmlTag servicesTag: xmlTag.getSubTags()) {
if(servicesTag.getName().equals("route")) {
XmlAttribute xmlAttribute = servicesTag.getAttribute("id");
if(xmlAttribute != null) {
String attrValue = xmlAttribute.getValue();
if(StringUtils.isNotBlank(attrValue)) {
set.add(attrValue);
}
}
}
}
}
}
return set;
}
@Nullable
public static List<Route> getRoutesOnControllerAction(Method method) {
String methodRouteActionName = RouteHelper.convertMethodToRouteControllerName(method);
if(methodRouteActionName == null) {
return null;
}
List<Route> routes = new ArrayList<Route>();
Symfony2ProjectComponent symfony2ProjectComponent = method.getProject().getComponent(Symfony2ProjectComponent.class);
for(Map.Entry<String, Route> routeEntry: symfony2ProjectComponent.getRoutes().entrySet()) {
if(routeEntry.getValue().getController() != null && routeEntry.getValue().getController().equals(methodRouteActionName)) {
routes.add(routeEntry.getValue());
}
}
return routes;
}
@Nullable
public static PsiElement getRouteNameTarget(Project project, String routeName) {
VirtualFile[] virtualFiles = RouteHelper.getRouteDefinitionInsideFile(project, routeName);
for(VirtualFile virtualFile: virtualFiles) {
PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile);
if(psiFile instanceof YAMLFile) {
YAMLKeyValue yamlKeyValue = YamlHelper.getRootKey(psiFile, routeName);
if(yamlKeyValue != null) {
return yamlKeyValue;
}
}
if(psiFile instanceof PhpFile) {
Collection<PhpDocTag> phpDocTagList = PsiTreeUtil.findChildrenOfType(psiFile, PhpDocTag.class);
for(PhpDocTag phpDocTag: phpDocTagList) {
String annotationFqnName = AnnotationRoutesStubIndex.getClassNameReference(phpDocTag);
if("\\Sensio\\Bundle\\FrameworkExtraBundle\\Configuration\\Route".equals(annotationFqnName)) {
PsiElement phpDocAttributeList = PsiElementUtils.getChildrenOfType(phpDocTag, PlatformPatterns.psiElement(PhpDocElementTypes.phpDocAttributeList));
if(phpDocAttributeList != null) {
// @TODO: use pattern
Matcher matcher = Pattern.compile("name\\s*=\\s*\"(\\w+)\"").matcher(phpDocAttributeList.getText());
if (matcher.find() && matcher.group(1).equals(routeName)) {
return phpDocAttributeList;
}
}
}
}
}
}
return null;
}
@Nullable
/**
 * Rebuilds a route's URL pattern from its compiled token list.
 *
 * The tokens are stored in reverse order, so the list is walked from the end to
 * produce the URL left-to-right. A "text" token contributes its literal text; a
 * "variable" token contributes its prefix followed by "{name}".
 *
 * Improvements over the previous version: uses a StringBuilder instead of
 * repeated String.concat (O(n^2)) and iterates backwards by index instead of
 * copying and reversing the outer list.
 *
 * @param routeTokens compiled route tokens; each token is a small value list
 *                    (presumably Symfony's compiled-route token tuples — confirm
 *                    against the dumping code)
 * @return the reconstructed URL, or {@code null} if nothing was produced
 */
public static String getRouteUrl(List<Collection<String>> routeTokens) {
    StringBuilder url = new StringBuilder();
    // Walk the outer list backwards; no mutation of the caller's list.
    for (int i = routeTokens.size() - 1; i >= 0; i--) {
        // Copy the token to a List so it can be accessed by index.
        List<String> token = new ArrayList<String>(routeTokens.get(i));
        if (token.size() >= 2 && token.get(1).equals("text")) {
            url.append(token.get(0));
        }
        if (token.size() >= 4 && token.get(3).equals("variable")) {
            url.append(token.get(2)).append("{").append(token.get(0)).append("}");
        }
    }
    return url.length() == 0 ? null : url.toString();
}
/**
 * Collects completion entries for every known route name.
 *
 * Entries come from three sources, in order: the compiled route collection of the
 * project component, the YAML stub index, and the annotation stub index. Names
 * already emitted by an earlier source are excluded from the later index scans
 * via the shared name set.
 */
public static List<LookupElement> getRoutesLookupElements(final @NotNull Project project) {
    final List<LookupElement> lookupElements = new ArrayList<LookupElement>();
    final Set<String> seenRouteNames = new HashSet<String>();

    // 1) Routes known from the compiled route collection.
    Symfony2ProjectComponent projectComponent = project.getComponent(Symfony2ProjectComponent.class);
    for (Route route : projectComponent.getRoutes().values()) {
        lookupElements.add(new RouteLookupElement(route));
        seenRouteNames.add(route.getName());
    }

    // 2) Route names only present in YAML files (weak elements, no controller info).
    SymfonyProcessors.CollectProjectUniqueKeysStrong yamlProcessor = new SymfonyProcessors.CollectProjectUniqueKeysStrong(project, YamlRoutesStubIndex.KEY, seenRouteNames);
    FileBasedIndex.getInstance().processAllKeys(YamlRoutesStubIndex.KEY, yamlProcessor, project);
    for (String routeName : yamlProcessor.getResult()) {
        lookupElements.add(new RouteLookupElement(new Route(routeName, null), true));
        seenRouteNames.add(routeName);
    }

    // 3) Route names only present as @Route annotations (also weak elements).
    SymfonyProcessors.CollectProjectUniqueKeysStrong annotationProcessor = new SymfonyProcessors.CollectProjectUniqueKeysStrong(project, AnnotationRoutesStubIndex.KEY, seenRouteNames);
    FileBasedIndex.getInstance().processAllKeys(AnnotationRoutesStubIndex.KEY, annotationProcessor, project);
    for (String routeName : annotationProcessor.getResult()) {
        lookupElements.add(new RouteLookupElement(new Route(routeName, null), true));
        seenRouteNames.add(routeName);
    }

    return lookupElements;
}
/**
 * Collects every navigation target for a route name: the matching controller
 * methods plus, when resolvable, the element where the route name is declared
 * (YAML key or @Route annotation).
 */
public static List<PsiElement> getRouteDefinitionTargets(Project project, String routeName) {
    List<PsiElement> definitionTargets = new ArrayList<PsiElement>();
    Collections.addAll(definitionTargets, RouteHelper.getMethods(project, routeName));
    PsiElement nameTarget = RouteHelper.getRouteNameTarget(project, routeName);
    if (nameTarget != null) {
        definitionTargets.add(nameTarget);
    }
    return definitionTargets;
}
}
| provide navigation for xml route files
| src/fr/adrienbrault/idea/symfony2plugin/routing/RouteHelper.java | provide navigation for xml route files |
|
Java | mit | b074f9b1ab42feb69024a631fcf558266046459d | 0 | LearningTree/TicketManorJava,LearningTree/TicketManorJava,LearningTree/TicketManorJava,LearningTree/TicketManorJava | package com.ticketmanor.model;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
/*
* Person - XXX
*/
/**
 * JPA entity for a person, persisted to the "People" table.
 *
 * Validation: first and last name are mandatory; the embedded {@code Address}
 * is flattened into this table.
 */
@Entity @Table(name="People")
public class Person {
    /** Auto-generated surrogate primary key. */
    @Id @GeneratedValue(strategy=GenerationType.AUTO)
    long id;

    @NotNull
    protected String firstName;
    protected String middles;
    @NotNull
    protected String lastName;
    String email;
    @Embedded
    Address address;

    /** No-argument constructor required by JPA. */
    public Person() {
        // EMPTY
    }

    /** Convenience constructor setting the two mandatory name parts. */
    public Person(String firstName, String lastName) {
        this.firstName = firstName;
        this.lastName = lastName;
    }

    @Override
    public String toString() {
        return getClass().getSimpleName().concat(getFullName());
    }

    /** Assembles "first [middles] last"; derived, so not persisted. */
    @Transient
    public String getFullName() {
        StringBuilder fullName = new StringBuilder(firstName);
        if (middles != null) {
            fullName.append(" ").append(middles);
        }
        return fullName.append(" ").append(lastName).toString();
    }
}
| src/main/java/com/ticketmanor/model/Person.java | package com.ticketmanor.model;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
/*
* Person - XXX
*/
// JPA entity for a person, persisted to the "people" table.
@Entity @Table(name="people")
public class Person {
    // Auto-generated surrogate primary key.
    @Id @GeneratedValue(strategy=GenerationType.AUTO)
    long id;
    // Mandatory first name (bean validation).
    @NotNull
    protected String firstName;
    // Optional middle name(s).
    protected String middles;
    // Mandatory last name (bean validation).
    @NotNull
    protected String lastName;
    String email;
    // Address fields are flattened into this entity's table.
    @Embedded
    Address address;

    // No-argument constructor required by JPA.
    public Person() {
        // EMPTY
    }

    // Convenience constructor setting the two mandatory name parts.
    public Person(String firstName, String lastName) {
        this.firstName = firstName;
        this.lastName = lastName;
    }

    @Override
    public String toString() {
        // Note: no separator between the class name and the full name.
        return getClass().getSimpleName() + getFullName();
    }

    // Assembles "first [middles] last". Derived value, so not persisted.
    // NOTE(review): throws NullPointerException if firstName is null — the
    // @NotNull constraint is only checked at validation time.
    @Transient
    public String getFullName() {
        StringBuilder sb = new StringBuilder(firstName);
        if (middles != null) {
            sb.append(" ").append(middles);
        }
        sb.append(" ").append(lastName);
        return sb.toString();
    }
}
| Capitalize table name for consistency
| src/main/java/com/ticketmanor/model/Person.java | Capitalize table name for consistency |
|
Java | mit | 8378ee3c7230fe2bf80953c3410b8264f2bfce1b | 0 | jjnguy/Jinq2XML | package xmlcomponents.autoparse;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import xmlcomponents.Converter;
import xmlcomponents.Jattr;
import xmlcomponents.Jode;
import xmlcomponents.autoparse.annotation.XmlProperty;
import xmlcomponents.autoparse.annotation.XmlProperty.XmlPropertyType;
/**
 * Reflection-based parser that maps a {@link Jode} XML node onto a plain Java
 * object: attributes feed simple fields, a single child element feeds a complex
 * field, and repeated child elements feed a {@code List} field. Use the
 * {@link XmlProperty} annotation to customize names, mark fields optional, or
 * bind the node's own text content.
 *
 * This parser is not smart.
 *
 * @author Justin Nelson
 */
public class AutoParser {

    /**
     * Parses a node into the given class type. Use the XmlProperty annotation to customize how this
     * parses your file.
     *
     * @param j
     *            the Jode to parse
     * @param clazz
     *            the type to parse the Jode (j) into
     * @return the object represented by the xml structure, or {@code null} on any
     *         failure (deliberate best-effort contract of this parser)
     */
    public static <T> T parse(Jode j, Class<T> clazz) {
        try {
            T ret = clazz.newInstance();
            for (Field f : clazz.getDeclaredFields()) {
                f.setAccessible(true);
                Object fieldValue = resolveField(f, j);
                // BUGFIX: never write null into a primitive field — Field.set would
                // throw and the whole parse would collapse to null.
                if (fieldValue != null || !f.getType().isPrimitive()) {
                    f.set(ret, fieldValue);
                }
            }
            return ret;
        } catch (Exception e) {
            // Best-effort: report and return null rather than propagate.
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Resolves one field's value from the node: text content, attribute, single
     * complex child, or list of children — tried in that order.
     *
     * @throws IllegalArgumentException if the field is not optional and no source matched
     */
    private static Object resolveField(Field f, Jode j) throws IllegalAccessException, InstantiationException {
        Class<?> type = f.getType();
        XmlProperty anno = f.getAnnotation(XmlProperty.class);
        String fieldName = getValueName(f);
        if (anno != null && anno.type() == XmlPropertyType.TEXT_NODE) {
            // Field is fed from the node's own text content.
            return j.v;
        } else if (j.hasAttribute(fieldName)) {
            Jattr attr = j.attribute(fieldName);
            Converter<String, Object> converter = determineConverter(type);
            return attr.value(converter);
        } else if (j.hasSingleChild(fieldName) && !Collection.class.isAssignableFrom(type)) {
            // BUGFIX: was '!type.isInstance((Collection<?>) null)', which is always
            // true (isInstance(null) is always false); collection-typed fields must
            // fall through to the list branch below instead of being instantiated here.
            Object complex = type.newInstance();
            for (Field subF : complex.getClass().getDeclaredFields()) {
                subF.setAccessible(true);
                Object resolvedResult = resolveField(subF, j.single(fieldName));
                // Never write null into a primitive sub-field.
                if (resolvedResult != null || !subF.getType().isPrimitive()) {
                    subF.set(complex, resolvedResult);
                }
            }
            return complex;
        } else if (type.isInstance(new ArrayList<Object>())) {
            // Collection field: parse every matching child into the declared element type.
            ArrayList<Object> values = new ArrayList<Object>();
            ParameterizedType genericType = (ParameterizedType) f.getGenericType();
            Class<?> elementType = (Class<?>) genericType.getActualTypeArguments()[0];
            for (Jode subJ : j.children(fieldName)) {
                values.add(parse(subJ, elementType));
            }
            return values;
        }
        if (anno == null || !anno.optional()) {
            throw new IllegalArgumentException("Could not parse field named '" + fieldName + "' for jode named '" + j.n
                    + "'");
        }
        // Optional and unresolvable: leave the field at its default value.
        return null;
    }

    /** Returns the XML name for a field: the annotation's valueName if set, else the field name. */
    private static String getValueName(Field f) {
        XmlProperty anno = f.getAnnotation(XmlProperty.class);
        if (anno != null && !anno.valueName().equals("")) {
            return anno.valueName();
        } else {
            return f.getName();
        }
    }

    /** Upper-cases the first character. Currently unused; kept for compatibility. */
    private static String cammelToPascal(String s) {
        return (s.charAt(0) + "").toUpperCase() + s.substring(1);
    }

    /** True for null or empty strings. */
    private static boolean empty(String s) {
        return s == null || s.equals("");
    }

    /**
     * Chooses a converter from an attribute's String value to the given field type.
     * Empty values convert to the type's zero value (or false).
     *
     * BUGFIX: the zero defaults are now returned in the field's own wrapper type.
     * Field.set only performs widening conversions, so handing the old boxed
     * Integer zero to a byte/Byte field threw IllegalArgumentException.
     *
     * @throws IllegalArgumentException if the type is not supported
     */
    private static Converter<String, Object> determineConverter(Class<?> type) {
        if (type.equals(String.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    return value;
                }
            };
        } else if (type.equals(Integer.class) || type.equals(int.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0;
                    return Integer.parseInt(value);
                }
            };
        } else if (type.equals(Long.class) || type.equals(long.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0L;
                    return Long.parseLong(value);
                }
            };
        } else if (type.equals(Byte.class) || type.equals(byte.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return (byte) 0;
                    return Byte.parseByte(value);
                }
            };
        } else if (type.equals(Double.class) || type.equals(double.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0.0;
                    return Double.parseDouble(value);
                }
            };
        } else if (type.equals(Float.class) || type.equals(float.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0f;
                    return Float.parseFloat(value);
                }
            };
        } else if (type.equals(Boolean.class) || type.equals(boolean.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return false;
                    return Boolean.parseBoolean(value);
                }
            };
        }
        // Begin complex types
        else if (type.equals(URL.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    try {
                        return new URL(value);
                    } catch (MalformedURLException e) {
                        throw new IllegalArgumentException("Could not convert the given value into a URL.");
                    }
                }
            };
        }
        throw new IllegalArgumentException("The type '" + type + "' is not supported in atribute parsing.");
    }
}
| src/xmlcomponents/autoparse/AutoParser.java | package xmlcomponents.autoparse;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import xmlcomponents.Converter;
import xmlcomponents.Jattr;
import xmlcomponents.Jode;
import xmlcomponents.autoparse.annotation.XmlProperty;
import xmlcomponents.autoparse.annotation.XmlProperty.XmlPropertyType;
/**
 * Reflection-based parser that maps a {@link Jode} XML node onto a plain Java
 * object: attributes feed simple fields, a single child element feeds a complex
 * field, and repeated child elements feed a {@code List} field. Use the
 * {@link XmlProperty} annotation to customize names, mark fields optional, or
 * bind the node's own text content.
 *
 * This parser is not smart.
 *
 * @author Justin Nelson
 */
public class AutoParser {

    /**
     * Parses a node into the given class type. Use the XmlProperty annotation to customize how this
     * parses your file.
     *
     * @param j
     *            the Jode to parse
     * @param clazz
     *            the type to parse the Jode (j) into
     * @return the object represented by the xml structure, or {@code null} on any
     *         failure (deliberate best-effort contract of this parser)
     */
    public static <T> T parse(Jode j, Class<T> clazz) {
        try {
            T ret = clazz.newInstance();
            for (Field f : clazz.getDeclaredFields()) {
                f.setAccessible(true);
                Object fieldValue = resolveField(f, j);
                // BUGFIX: never write null into a primitive field — Field.set would
                // throw IllegalArgumentException and the whole parse would return null.
                if (fieldValue != null || !f.getType().isPrimitive()) {
                    f.set(ret, fieldValue);
                }
            }
            return ret;
        } catch (Exception e) {
            // Best-effort: report and return null rather than propagate.
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Resolves one field's value from the node: text content, attribute, single
     * complex child, or list of children — tried in that order.
     *
     * @throws IllegalArgumentException if the field is not optional and no source matched
     */
    private static Object resolveField(Field f, Jode j) throws IllegalAccessException, InstantiationException {
        Class<?> type = f.getType();
        XmlProperty anno = f.getAnnotation(XmlProperty.class);
        String fieldName = getValueName(f);
        if (anno != null && anno.type() == XmlPropertyType.TEXT_NODE) {
            // Field is fed from the node's own text content.
            return j.v;
        } else if (j.hasAttribute(fieldName)) {
            Jattr attr = j.attribute(fieldName);
            Converter<String, Object> converter = determineConverter(type);
            return attr.value(converter);
        } else if (j.hasSingleChild(fieldName) && !Collection.class.isAssignableFrom(type)) {
            // BUGFIX: was '!type.isInstance((Collection<?>) null)', which is always
            // true (isInstance(null) is always false); collection-typed fields must
            // fall through to the list branch below instead of being instantiated here.
            Object complex = type.newInstance();
            for (Field subF : complex.getClass().getDeclaredFields()) {
                subF.setAccessible(true);
                Object resolvedResult = resolveField(subF, j.single(fieldName));
                // BUGFIX: previously the result was written unconditionally; a null
                // result for a primitive sub-field made Field.set throw.
                if (resolvedResult != null || !subF.getType().isPrimitive()) {
                    subF.set(complex, resolvedResult);
                }
            }
            return complex;
        } else if (type.isInstance(new ArrayList<Object>())) {
            // Collection field: parse every matching child into the declared element type.
            ArrayList<Object> values = new ArrayList<Object>();
            ParameterizedType genericType = (ParameterizedType) f.getGenericType();
            Class<?> elementType = (Class<?>) genericType.getActualTypeArguments()[0];
            for (Jode subJ : j.children(fieldName)) {
                values.add(parse(subJ, elementType));
            }
            return values;
        }
        if (anno == null || !anno.optional()) {
            throw new IllegalArgumentException("Could not parse field named '" + fieldName + "' for jode named '" + j.n
                    + "'");
        }
        // Optional and unresolvable: leave the field at its default value.
        return null;
    }

    /** Returns the XML name for a field: the annotation's valueName if set, else the field name. */
    private static String getValueName(Field f) {
        XmlProperty anno = f.getAnnotation(XmlProperty.class);
        if (anno != null && !anno.valueName().equals("")) {
            return anno.valueName();
        } else {
            return f.getName();
        }
    }

    /** Upper-cases the first character. Currently unused; kept for compatibility. */
    private static String cammelToPascal(String s) {
        return (s.charAt(0) + "").toUpperCase() + s.substring(1);
    }

    /** True for null or empty strings. */
    private static boolean empty(String s) {
        return s == null || s.equals("");
    }

    /**
     * Chooses a converter from an attribute's String value to the given field type.
     * Empty values convert to the type's zero value (or false).
     *
     * BUGFIX: the zero defaults are now returned in the field's own wrapper type.
     * Field.set only performs widening conversions, so handing the old boxed
     * Integer zero to a byte/Byte field threw IllegalArgumentException.
     *
     * @throws IllegalArgumentException if the type is not supported
     */
    private static Converter<String, Object> determineConverter(Class<?> type) {
        if (type.equals(String.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    return value;
                }
            };
        } else if (type.equals(Integer.class) || type.equals(int.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0;
                    return Integer.parseInt(value);
                }
            };
        } else if (type.equals(Long.class) || type.equals(long.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0L;
                    return Long.parseLong(value);
                }
            };
        } else if (type.equals(Byte.class) || type.equals(byte.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return (byte) 0;
                    return Byte.parseByte(value);
                }
            };
        } else if (type.equals(Double.class) || type.equals(double.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0.0;
                    return Double.parseDouble(value);
                }
            };
        } else if (type.equals(Float.class) || type.equals(float.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return 0f;
                    return Float.parseFloat(value);
                }
            };
        } else if (type.equals(Boolean.class) || type.equals(boolean.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    if (empty(value))
                        return false;
                    return Boolean.parseBoolean(value);
                }
            };
        }
        // Begin complex types
        else if (type.equals(URL.class)) {
            return new Converter<String, Object>() {
                @Override
                public Object convert(String value) {
                    try {
                        return new URL(value);
                    } catch (MalformedURLException e) {
                        throw new IllegalArgumentException("Could not convert the given value into a URL.");
                    }
                }
            };
        }
        throw new IllegalArgumentException("The type '" + type + "' is not supported in atribute parsing.");
    }
}
| Fixed another bug in autoparser
| src/xmlcomponents/autoparse/AutoParser.java | Fixed another bug in autoparser |
|
Java | mit | cf72a9088ede8c9c15ba08b8721405dabc362cc5 | 0 | nilsschmidt1337/ldparteditor,nilsschmidt1337/ldparteditor | /* MIT - License
Copyright (c) 2012 - this year, Nils Schmidt
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
package org.nschmidt.ldparteditor.shells.editor3d;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.URLDecoder;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.opengl.GLCanvas;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.wb.swt.SWTResourceManager;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.GLContext;
import org.lwjgl.util.vector.Matrix4f;
import org.lwjgl.util.vector.Vector3f;
import org.lwjgl.util.vector.Vector4f;
import org.nschmidt.ldparteditor.composites.Composite3D;
import org.nschmidt.ldparteditor.composites.CompositeContainer;
import org.nschmidt.ldparteditor.composites.ToolItem;
import org.nschmidt.ldparteditor.composites.compositetab.CompositeTab;
import org.nschmidt.ldparteditor.data.DatFile;
import org.nschmidt.ldparteditor.data.DatType;
import org.nschmidt.ldparteditor.data.GColour;
import org.nschmidt.ldparteditor.data.GData;
import org.nschmidt.ldparteditor.data.GData1;
import org.nschmidt.ldparteditor.data.GDataPNG;
import org.nschmidt.ldparteditor.data.LibraryManager;
import org.nschmidt.ldparteditor.data.Matrix;
import org.nschmidt.ldparteditor.data.ReferenceParser;
import org.nschmidt.ldparteditor.data.Vertex;
import org.nschmidt.ldparteditor.data.VertexManager;
import org.nschmidt.ldparteditor.dialogs.colour.ColourDialog;
import org.nschmidt.ldparteditor.dialogs.copy.CopyDialog;
import org.nschmidt.ldparteditor.dialogs.edger2.EdgerDialog;
import org.nschmidt.ldparteditor.dialogs.intersector.IntersectorDialog;
import org.nschmidt.ldparteditor.dialogs.isecalc.IsecalcDialog;
import org.nschmidt.ldparteditor.dialogs.lines2pattern.Lines2PatternDialog;
import org.nschmidt.ldparteditor.dialogs.newproject.NewProjectDialog;
import org.nschmidt.ldparteditor.dialogs.pathtruder.PathTruderDialog;
import org.nschmidt.ldparteditor.dialogs.rectifier.RectifierDialog;
import org.nschmidt.ldparteditor.dialogs.rotate.RotateDialog;
import org.nschmidt.ldparteditor.dialogs.round.RoundDialog;
import org.nschmidt.ldparteditor.dialogs.scale.ScaleDialog;
import org.nschmidt.ldparteditor.dialogs.setcoordinates.CoordinatesDialog;
import org.nschmidt.ldparteditor.dialogs.slicerpro.SlicerProDialog;
import org.nschmidt.ldparteditor.dialogs.symsplitter.SymSplitterDialog;
import org.nschmidt.ldparteditor.dialogs.translate.TranslateDialog;
import org.nschmidt.ldparteditor.dialogs.txt2dat.Txt2DatDialog;
import org.nschmidt.ldparteditor.dialogs.unificator.UnificatorDialog;
import org.nschmidt.ldparteditor.dialogs.value.ValueDialog;
import org.nschmidt.ldparteditor.dialogs.value.ValueDialogInt;
import org.nschmidt.ldparteditor.enums.GLPrimitives;
import org.nschmidt.ldparteditor.enums.MergeTo;
import org.nschmidt.ldparteditor.enums.MouseButton;
import org.nschmidt.ldparteditor.enums.OpenInWhat;
import org.nschmidt.ldparteditor.enums.Threshold;
import org.nschmidt.ldparteditor.enums.TransformationMode;
import org.nschmidt.ldparteditor.enums.View;
import org.nschmidt.ldparteditor.enums.WorkingMode;
import org.nschmidt.ldparteditor.helpers.Manipulator;
import org.nschmidt.ldparteditor.helpers.ShellHelper;
import org.nschmidt.ldparteditor.helpers.Version;
import org.nschmidt.ldparteditor.helpers.WidgetSelectionHelper;
import org.nschmidt.ldparteditor.helpers.composite3d.Edger2Settings;
import org.nschmidt.ldparteditor.helpers.composite3d.IntersectorSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.IsecalcSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.PathTruderSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.RectifierSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.SelectorSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.SlicerProSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.SymSplitterSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.TreeData;
import org.nschmidt.ldparteditor.helpers.composite3d.Txt2DatSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.UnificatorSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.ViewIdleManager;
import org.nschmidt.ldparteditor.helpers.compositetext.ProjectActions;
import org.nschmidt.ldparteditor.helpers.compositetext.SubfileCompiler;
import org.nschmidt.ldparteditor.helpers.math.MathHelper;
import org.nschmidt.ldparteditor.helpers.math.Vector3d;
import org.nschmidt.ldparteditor.i18n.I18n;
import org.nschmidt.ldparteditor.logger.NLogger;
import org.nschmidt.ldparteditor.main.LDPartEditor;
import org.nschmidt.ldparteditor.opengl.OpenGLRenderer;
import org.nschmidt.ldparteditor.project.Project;
import org.nschmidt.ldparteditor.resources.ResourceManager;
import org.nschmidt.ldparteditor.shells.editormeta.EditorMetaWindow;
import org.nschmidt.ldparteditor.shells.editortext.EditorTextWindow;
import org.nschmidt.ldparteditor.shells.searchnreplace.SearchWindow;
import org.nschmidt.ldparteditor.text.LDParsingException;
import org.nschmidt.ldparteditor.text.References;
import org.nschmidt.ldparteditor.text.TextTriangulator;
import org.nschmidt.ldparteditor.text.UTF8BufferedReader;
import org.nschmidt.ldparteditor.widgets.BigDecimalSpinner;
import org.nschmidt.ldparteditor.widgets.TreeItem;
import org.nschmidt.ldparteditor.widgets.ValueChangeAdapter;
import org.nschmidt.ldparteditor.workbench.Editor3DWindowState;
import org.nschmidt.ldparteditor.workbench.WorkbenchManager;
/**
* The 3D editor window
* <p>
* Note: This class should be instantiated once, it defines all listeners and
* part of the business logic.
*
* @author nils
*
*/
public class Editor3DWindow extends Editor3DDesign {
/** The window state of this window */
private Editor3DWindowState editor3DWindowState;
/** The reference to this window */
private static Editor3DWindow window;
/** The window state of this window */
private SearchWindow searchWindow;
public static final ArrayList<GLCanvas> canvasList = new ArrayList<GLCanvas>();
public static final ArrayList<OpenGLRenderer> renders = new ArrayList<OpenGLRenderer>();
private boolean addingSomething = false;
private boolean addingVertices = false;
private boolean addingLines = false;
private boolean addingTriangles = false;
private boolean addingQuads = false;
private boolean addingCondlines = false;
private boolean addingSubfiles = false;
private boolean movingAdjacentData = false;
private boolean noTransparentSelection = false;
private boolean bfcToggle = false;
private int workingType = WorkingMode.VERTICES;
private int workingAction = WorkingMode.SELECT;
private GColour lastUsedColour = new GColour(16, .5f, .5f, .5f, 1f);
private int transformationMode = WorkingMode.LOCAL;
private int snapSize = 1;
private Txt2DatSettings ts = new Txt2DatSettings();
private Edger2Settings es = new Edger2Settings();
private RectifierSettings rs = new RectifierSettings();
private IsecalcSettings is = new IsecalcSettings();
private SlicerProSettings ss = new SlicerProSettings();
private IntersectorSettings ins = new IntersectorSettings();
private PathTruderSettings ps = new PathTruderSettings();
private SymSplitterSettings sims = new SymSplitterSettings();
private UnificatorSettings us = new UnificatorSettings();
private SelectorSettings sels = new SelectorSettings();
private boolean updatingPngPictureTab;
private int pngPictureUpdateCounter = 0;
private final EditorMetaWindow metaWindow = new EditorMetaWindow();
private boolean updatingSelectionTab = true;
/**
* Create the application window.
*/
/**
 * Creates the 3D editor window and starts the shared render loop on the SWT
 * display thread. One runnable re-schedules itself via timerExec and redraws
 * one registered canvas per tick (round-robin over {@code canvasList}).
 *
 * The single-element arrays ({@code i}, {@code intervall}) are mutable captures
 * so the anonymous Runnable can update them across ticks.
 */
public Editor3DWindow() {
    super();
    // Round-robin index of the canvas to redraw on the next tick.
    final int[] i = new int[1];
    // Canvas/renderer flagged by ViewIdleManager to be drawn with priority.
    final GLCanvas[] first1 = ViewIdleManager.firstCanvas;
    final OpenGLRenderer[] first2 = ViewIdleManager.firstRender;
    // Delay (ms) until the next tick; normally 10, stretched to 500 on pause.
    final int[] intervall = new int[] { 10 };
    Display.getCurrent().asyncExec(new Runnable() {
        @Override
        public void run() {
            if (ViewIdleManager.pause[0].get()) {
                // Pause requested: consume the flag and back off for one long tick.
                ViewIdleManager.pause[0].set(false);
                intervall[0] = 500;
            } else {
                final int cs = canvasList.size();
                if (i[0] < cs && cs > 0) {
                    GLCanvas canvas;
                    if (!canvasList.get(i[0]).equals(first1[0])) {
                        // Draw the priority canvas first (once), then clear the flag.
                        canvas = first1[0];
                        if (canvas != null && !canvas.isDisposed()) {
                            first2[0].drawScene();
                            first1[0] = null;
                            first2[0] = null;
                        }
                    }
                    canvas = canvasList.get(i[0]);
                    if (!canvas.isDisposed()) {
                        // Render mode 5 canvases are skipped unless they are the only
                        // canvas — presumably an expensive mode; confirm in OpenGLRenderer.
                        if (renders.get(i[0]).getC3D().getRenderMode() != 5 || cs == 1) {
                            renders.get(i[0]).drawScene();
                        }
                    } else {
                        // Drop disposed canvases and their renderers from the rotation.
                        canvasList.remove(i[0]);
                        renders.remove(i[0]);
                    }
                    i[0]++;
                } else {
                    // Wrap around to the first canvas.
                    i[0] = 0;
                }
            }
            // Re-schedule this runnable, then reset the delay for the next round.
            Display.getCurrent().timerExec(intervall[0], this);
            intervall[0] = 10;
        }
    });
}
/**
* Run a fresh instance of this window
*/
public void run() {
window = this;
// Load the window state data
editor3DWindowState = WorkbenchManager.getEditor3DWindowState();
WorkbenchManager.setEditor3DWindow(this);
// Closing this window causes the whole application to quit
this.setBlockOnOpen(true);
// Creating the window to get the shell
this.create();
final Shell sh = this.getShell();
sh.setText(Version.getApplicationName());
sh.setImage(ResourceManager.getImage("imgDuke2.png")); //$NON-NLS-1$
sh.setMinimumSize(640, 480);
sh.setBounds(this.editor3DWindowState.getWindowState().getSizeAndPosition());
if (this.editor3DWindowState.getWindowState().isCentered()) {
ShellHelper.centerShellOnPrimaryScreen(sh);
}
// Maximize has to be called asynchronously
sh.getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
sh.setMaximized(editor3DWindowState.getWindowState().isMaximized());
}
});
// MARK All final listeners will be configured here..
// First, create all menu actions.
createActions();
btn_Sync[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetSearch();
int[][] stats = new int[13][3];
stats[0] = LibraryManager.syncProjectElements(treeItem_Project[0]);
stats[5] = LibraryManager.syncUnofficialParts(treeItem_UnofficialParts[0]);
stats[6] = LibraryManager.syncUnofficialSubparts(treeItem_UnofficialSubparts[0]);
stats[7] = LibraryManager.syncUnofficialPrimitives(treeItem_UnofficialPrimitives[0]);
stats[8] = LibraryManager.syncUnofficialHiResPrimitives(treeItem_UnofficialPrimitives48[0]);
stats[9] = LibraryManager.syncOfficialParts(treeItem_OfficialParts[0]);
stats[10] = LibraryManager.syncOfficialSubparts(treeItem_OfficialSubparts[0]);
stats[11] = LibraryManager.syncOfficialPrimitives(treeItem_OfficialPrimitives[0]);
stats[12] = LibraryManager.syncOfficialHiResPrimitives(treeItem_OfficialPrimitives48[0]);
int additions = 0;
int deletions = 0;
int conflicts = 0;
for (int[] is : stats) {
additions += is[0];
deletions += is[1];
conflicts += is[2];
}
txt_Search[0].setText(" "); //$NON-NLS-1$
txt_Search[0].setText(""); //$NON-NLS-1$
Set<DatFile> dfs = new HashSet<DatFile>();
for (OpenGLRenderer renderer : renders) {
dfs.add(renderer.getC3D().getLockableDatFileReference());
}
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
DatFile txtDat = ((CompositeTab) t).getState().getFileNameObj();
if (txtDat != null) {
dfs.add(txtDat);
}
}
}
for (DatFile df : dfs) {
SubfileCompiler.compile(df);
}
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
DatFile txtDat = ((CompositeTab) t).getState().getFileNameObj();
if (txtDat != null) {
((CompositeTab) t).parseForError();
((CompositeTab) t).getTextComposite().redraw();
((CompositeTab) t).getState().getTab().setText(((CompositeTab) t).getState().getFilenameWithStar());
}
}
}
updateTree_unsavedEntries();
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBox.setText(I18n.DIALOG_SyncTitle);
Object[] messageArguments = {additions, deletions, conflicts};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_Sync);
messageBox.setMessage(formatter.format(messageArguments));
messageBox.open();
}
});
btn_New[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
ProjectActions.createNewProject(Editor3DWindow.getWindow(), false);
}
});
btn_Open[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (ProjectActions.openProject()) {
Project.create(false);
treeItem_Project[0].setData(Project.getProjectPath());
resetSearch();
LibraryManager.readProjectPartsParent(treeItem_ProjectParts[0]);
LibraryManager.readProjectParts(treeItem_ProjectParts[0]);
LibraryManager.readProjectSubparts(treeItem_ProjectSubparts[0]);
LibraryManager.readProjectPrimitives(treeItem_ProjectPrimitives[0]);
LibraryManager.readProjectHiResPrimitives(treeItem_ProjectPrimitives48[0]);
treeItem_OfficialParts[0].setData(null);
txt_Search[0].setText(" "); //$NON-NLS-1$
txt_Search[0].setText(""); //$NON-NLS-1$
updateTree_unsavedEntries();
}
}
});
btn_Save[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1) {
if (treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
if (!df.isReadOnly() && Project.getUnsavedFiles().contains(df)) {
if (df.save()) {
Editor3DWindow.getWindow().updateTree_unsavedEntries();
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
messageBoxError.open();
Editor3DWindow.getWindow().updateTree_unsavedEntries();
}
}
} else if (treeParts[0].getSelection()[0].getData() instanceof ArrayList<?>) {
NLogger.debug(getClass(), "Saving all files from this group"); //$NON-NLS-1$
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> dfs = (ArrayList<DatFile>) treeParts[0].getSelection()[0].getData();
for (DatFile df : dfs) {
if (!df.isReadOnly() && Project.getUnsavedFiles().contains(df)) {
if (df.save()) {
Project.removeUnsavedFile(df);
Editor3DWindow.getWindow().updateTree_unsavedEntries();
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
messageBoxError.open();
Editor3DWindow.getWindow().updateTree_unsavedEntries();
}
}
}
}
} else if (treeParts[0].getSelection()[0].getData() instanceof String) {
if (treeParts[0].getSelection()[0].equals(treeItem_Project[0])) {
NLogger.debug(getClass(), "Save the project..."); //$NON-NLS-1$
if (Project.isDefaultProject()) {
ProjectActions.createNewProject(Editor3DWindow.getWindow(), true);
}
iterateOverItems(treeItem_ProjectParts[0]);
iterateOverItems(treeItem_ProjectSubparts[0]);
iterateOverItems(treeItem_ProjectPrimitives[0]);
iterateOverItems(treeItem_ProjectPrimitives48[0]);
} else if (treeParts[0].getSelection()[0].equals(treeItem_Unofficial[0])) {
iterateOverItems(treeItem_UnofficialParts[0]);
iterateOverItems(treeItem_UnofficialSubparts[0]);
iterateOverItems(treeItem_UnofficialPrimitives[0]);
iterateOverItems(treeItem_UnofficialPrimitives48[0]);
}
NLogger.debug(getClass(), "Saving all files from this group to"); //$NON-NLS-1$
NLogger.debug(getClass(), (String) treeParts[0].getSelection()[0].getData());
}
} else {
NLogger.debug(getClass(), "Save the project..."); //$NON-NLS-1$
if (Project.isDefaultProject()) {
ProjectActions.createNewProject(Editor3DWindow.getWindow(), true);
}
}
}
/**
 * Persists every modified, writable {@code DatFile} attached to the given
 * tree node. Shows an error dialog per file whose save fails; the unsaved
 * tree entries are refreshed after each attempt either way.
 */
private void iterateOverItems(TreeItem ti) {
    @SuppressWarnings("unchecked")
    ArrayList<DatFile> files = (ArrayList<DatFile>) ti.getData();
    for (DatFile file : files) {
        // Skip read-only files and files without unsaved modifications.
        if (file.isReadOnly() || !Project.getUnsavedFiles().contains(file)) {
            continue;
        }
        if (file.save()) {
            Project.removeUnsavedFile(file);
        } else {
            // Saving failed: inform the user.
            MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
            messageBoxError.setText(I18n.DIALOG_Error);
            messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
            messageBoxError.open();
        }
        // Refresh the tree after both the success and the failure path.
        Editor3DWindow.getWindow().updateTree_unsavedEntries();
    }
}
});
// "Save All": writes every writable file with unsaved changes back to disk.
btn_SaveAll[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        // Iterate over a snapshot, because a successful save mutates
        // Project.getUnsavedFiles() via removeUnsavedFile().
        HashSet<DatFile> unsaved = new HashSet<DatFile>(Project.getUnsavedFiles());
        for (DatFile file : unsaved) {
            if (file.isReadOnly()) {
                continue; // Read-only files cannot be written back.
            }
            if (file.save()) {
                Project.removeUnsavedFile(file);
            } else {
                MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
                messageBoxError.setText(I18n.DIALOG_Error);
                messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
                messageBoxError.open();
            }
        }
        // The default project has no on-disk location yet; create one first.
        if (Project.isDefaultProject()) {
            ProjectActions.createNewProject(getWindow(), true);
        }
        Editor3DWindow.getWindow().updateTree_unsavedEntries();
    }
});
// Creates a brand-new *.dat file and opens it in the text AND the 3D editor.
btn_NewDat[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        createNewDatFile(getShell(), OpenInWhat.EDITOR_TEXT_AND_3D);
    }
});
// Opens an existing *.dat file in the text AND the 3D editor.
btn_OpenDat[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        openDatFile(getShell(), OpenInWhat.EDITOR_TEXT_AND_3D);
    }
});
// --- Working-action buttons (mutually exclusive tool modes) ---------------
// clickBtnTest() handles the toggle/untoggle visuals of the button group.
btn_Select[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Select[0]);
        workingAction = WorkingMode.SELECT;
    }
});
btn_Move[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Move[0]);
        workingAction = WorkingMode.MOVE;
    }
});
btn_Rotate[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Rotate[0]);
        workingAction = WorkingMode.ROTATE;
    }
});
btn_Scale[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Scale[0]);
        workingAction = WorkingMode.SCALE;
    }
});
btn_Combined[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Combined[0]);
        workingAction = WorkingMode.COMBINED;
    }
});
// --- Transformation coordinate system -------------------------------------
// NOTE(review): transformationMode reuses the WorkingMode enum for its
// LOCAL/GLOBAL constants — confirm this overlap is intentional.
btn_Local[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Local[0]);
        transformationMode = WorkingMode.LOCAL;
    }
});
btn_Global[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Global[0]);
        transformationMode = WorkingMode.GLOBAL;
    }
});
// --- Object-type filter buttons (what kind of data is being edited) -------
btn_Vertices[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Vertices[0]);
        setWorkingType(WorkingMode.VERTICES);
    }
});
btn_TrisNQuads[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_TrisNQuads[0]);
        setWorkingType(WorkingMode.FACES);
    }
});
btn_Lines[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickBtnTest(btn_Lines[0]);
        setWorkingType(WorkingMode.LINES);
    }
});
// Subfile mode only makes sense while a file is open for editing.
btn_Subfiles[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            clickBtnTest(btn_Subfiles[0]);
            setWorkingType(WorkingMode.SUBFILES);
        }
    }
});
// Shows the meta/comment window; run() creates it on first use, open()
// brings an already created window to the front.
btn_AddComment[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (!metaWindow.isOpened()) {
            metaWindow.run();
        } else {
            metaWindow.open();
        }
    }
});
// --- "Add element" toggle buttons -----------------------------------------
// Each handler resets any previous add mode, toggles its own flag from the
// button's selection state, and lets clickSingleBtn() enforce that only one
// add button stays pressed at a time.
btn_AddVertex[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        resetAddState();
        clickSingleBtn(btn_AddVertex[0]);
        setAddingVertices(btn_AddVertex[0].getSelection());
        setAddingSomething(isAddingVertices());
    }
});
btn_AddPrimitive[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        resetAddState();
        setAddingSubfiles(btn_AddPrimitive[0].getSelection());
        setAddingSomething(isAddingSubfiles());
        clickSingleBtn(btn_AddPrimitive[0]);
    }
});
btn_AddLine[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        resetAddState();
        setAddingLines(btn_AddLine[0].getSelection());
        setAddingSomething(isAddingLines());
        clickSingleBtn(btn_AddLine[0]);
    }
});
btn_AddTriangle[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        resetAddState();
        setAddingTriangles(btn_AddTriangle[0].getSelection());
        setAddingSomething(isAddingTriangles());
        clickSingleBtn(btn_AddTriangle[0]);
    }
});
btn_AddQuad[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        resetAddState();
        setAddingQuads(btn_AddQuad[0].getSelection());
        setAddingSomething(isAddingQuads());
        clickSingleBtn(btn_AddQuad[0]);
    }
});
btn_AddCondline[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        resetAddState();
        setAddingCondlines(btn_AddCondline[0].getSelection());
        setAddingSomething(isAddingCondlines());
        clickSingleBtn(btn_AddCondline[0]);
    }
});
// Independent toggle: whether adjacent data moves together with a dragged vertex.
btn_MoveAdjacentData[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        clickSingleBtn(btn_MoveAdjacentData[0]);
        setMovingAdjacentData(btn_MoveAdjacentData[0].getSelection());
    }
});
// Runs the subfile compiler on the file currently open for editing (no-op
// when no file is open).
btn_CompileSubfile[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            SubfileCompiler.compile(Project.getFileToEdit());
        }
    }
});
// --- Line-width presets ----------------------------------------------------
// Each preset swaps the sphere primitives used for vertex markers, updates
// the global line-width constants, and recompiles every file currently shown
// by a renderer — presumably so the new widths take effect immediately
// (TODO confirm). The four handlers are intentionally parallel; only the
// constants differ.
btn_lineSize1[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        GLPrimitives.SPHERE = GLPrimitives.SPHERE1;
        GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV1;
        View.lineWidth1000[0] = 25f;
        View.lineWidth[0] = .025f;
        View.lineWidthGL[0] = .375f;
        // Collect into a Set to compile each distinct file only once.
        Set<DatFile> dfs = new HashSet<DatFile>();
        for (OpenGLRenderer renderer : renders) {
            dfs.add(renderer.getC3D().getLockableDatFileReference());
        }
        for (DatFile df : dfs) {
            SubfileCompiler.compile(df);
        }
        clickSingleBtn(btn_lineSize1[0]);
    }
});
btn_lineSize2[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        GLPrimitives.SPHERE = GLPrimitives.SPHERE2;
        GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV2;
        View.lineWidth1000[0] = 50f;
        View.lineWidth[0] = .050f;
        View.lineWidthGL[0] = .75f;
        Set<DatFile> dfs = new HashSet<DatFile>();
        for (OpenGLRenderer renderer : renders) {
            dfs.add(renderer.getC3D().getLockableDatFileReference());
        }
        for (DatFile df : dfs) {
            SubfileCompiler.compile(df);
        }
        clickSingleBtn(btn_lineSize2[0]);
    }
});
btn_lineSize3[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        GLPrimitives.SPHERE = GLPrimitives.SPHERE3;
        GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV3;
        View.lineWidth1000[0] = 100f;
        View.lineWidth[0] = .100f;
        View.lineWidthGL[0] = 1.5f;
        Set<DatFile> dfs = new HashSet<DatFile>();
        for (OpenGLRenderer renderer : renders) {
            dfs.add(renderer.getC3D().getLockableDatFileReference());
        }
        for (DatFile df : dfs) {
            SubfileCompiler.compile(df);
        }
        clickSingleBtn(btn_lineSize3[0]);
    }
});
btn_lineSize4[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        GLPrimitives.SPHERE = GLPrimitives.SPHERE4;
        GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV4;
        View.lineWidth1000[0] = 200f;
        View.lineWidth[0] = .200f;
        View.lineWidthGL[0] = 3f;
        Set<DatFile> dfs = new HashSet<DatFile>();
        for (OpenGLRenderer renderer : renders) {
            dfs.add(renderer.getC3D().getLockableDatFileReference());
        }
        for (DatFile df : dfs) {
            SubfileCompiler.compile(df);
        }
        clickSingleBtn(btn_lineSize4[0]);
    }
});
// Reverses the winding (BFC orientation) of the current selection.
btn_BFCswap[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            Project.getFileToEdit().getVertexManager().windingChangeSelection();
        }
    }
});
// Rounds the selection to the configured precision. Holding CTRL first opens
// the dialog to adjust the precision settings before rounding.
btn_RoundSelection[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            if ((e.stateMask & SWT.CTRL) == SWT.CTRL) {
                new RoundDialog(getShell()).open();
            }
            Project.getFileToEdit().getVertexManager()
            .roundSelection(WorkbenchManager.getUserSettingState().getCoordsPrecision(), WorkbenchManager.getUserSettingState().getTransMatrixPrecision(), isMovingAdjacentData(), true);
        }
    }
});
// "Pipette": picks a colour from the current selection, makes it the
// "last used" colour, and rebinds the last-used-colour button (swatch
// painting, click behaviour and tooltip) to that colour.
btn_Pipette[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            VertexManager vm = Project.getFileToEdit().getVertexManager();
            final GColour gColour2 = vm.getRandomSelectedColour(lastUsedColour);
            setLastUsedColour(gColour2);
            // Drop the button's previous paint/selection listeners before
            // installing the ones bound to the newly picked colour.
            btn_LastUsedColour[0].removeListener(SWT.Paint, btn_LastUsedColour[0].getListeners(SWT.Paint)[0]);
            btn_LastUsedColour[0].removeListener(SWT.Selection, btn_LastUsedColour[0].getListeners(SWT.Selection)[0]);
            final Color col = SWTResourceManager.getColor((int) (gColour2.getR() * 255f), (int) (gColour2.getG() * 255f), (int) (gColour2.getB() * 255f));
            final Point size = btn_LastUsedColour[0].computeSize(SWT.DEFAULT, SWT.DEFAULT);
            final int x = size.x / 4;
            final int y = size.y / 4;
            final int w = size.x / 2;
            final int h = size.y / 2;
            int num = gColour2.getColourNumber();
            // FIX: normalise colour numbers unknown to LDConfig to -1, like
            // the palette handler and the selection listener below already do.
            // Otherwise the tooltip branch would query View.getLDConfigColourName()
            // for a colour that does not exist in LDConfig.
            if (!View.hasLDConfigColour(num)) {
                num = -1;
            }
            btn_LastUsedColour[0].addPaintListener(new PaintListener() {
                @Override
                public void paintControl(PaintEvent e) {
                    // Paint the colour swatch; the overlay icon signals full
                    // opacity vs. translucency.
                    e.gc.setBackground(col);
                    e.gc.fillRectangle(x, y, w, h);
                    if (gColour2.getA() == 1f) {
                        e.gc.drawImage(ResourceManager.getImage("icon16_transparent.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                    } else {
                        e.gc.drawImage(ResourceManager.getImage("icon16_halftrans.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                    }
                }
            });
            btn_LastUsedColour[0].addSelectionListener(new SelectionListener() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    // Re-applies the picked colour to whatever is selected
                    // when the button is clicked later on.
                    if (Project.getFileToEdit() != null) {
                        int num = gColour2.getColourNumber();
                        if (!View.hasLDConfigColour(num)) {
                            num = -1;
                        }
                        Project.getFileToEdit().getVertexManager().colourChangeSelection(num, gColour2.getR(), gColour2.getG(), gColour2.getB(), gColour2.getA());
                    }
                }
                @Override
                public void widgetDefaultSelected(SelectionEvent e) {
                }
            });
            if (num != -1) {
                btn_LastUsedColour[0].setToolTipText("Colour [" + num + "]: " + View.getLDConfigColourName(num)); //$NON-NLS-1$ //$NON-NLS-2$ I18N
            } else {
                // Direct colour: encode as 0x2RRGGBB for the tooltip.
                StringBuilder colourBuilder = new StringBuilder();
                colourBuilder.append("0x2"); //$NON-NLS-1$
                colourBuilder.append(MathHelper.toHex((int) (255f * gColour2.getR())).toUpperCase());
                colourBuilder.append(MathHelper.toHex((int) (255f * gColour2.getG())).toUpperCase());
                colourBuilder.append(MathHelper.toHex((int) (255f * gColour2.getB())).toUpperCase());
                btn_LastUsedColour[0].setToolTipText("Colour [" + colourBuilder.toString() + "]"); //$NON-NLS-1$ //$NON-NLS-2$ I18N
            }
            btn_LastUsedColour[0].redraw();
        }
    }
});
// "Palette": lets the user pick a colour via dialog, applies it to the
// selection, and rebinds the last-used-colour button to the chosen colour.
btn_Palette[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            // One-element array used as an out-parameter for the dialog.
            final GColour[] gColour2 = new GColour[1];
            new ColourDialog(getShell(), gColour2).open();
            if (gColour2[0] != null) {
                setLastUsedColour(gColour2[0]);
                // Normalise colour numbers unknown to LDConfig to -1 (direct colour).
                int num = gColour2[0].getColourNumber();
                if (!View.hasLDConfigColour(num)) {
                    num = -1;
                }
                Project.getFileToEdit().getVertexManager().colourChangeSelection(num, gColour2[0].getR(), gColour2[0].getG(), gColour2[0].getB(), gColour2[0].getA());
                // Swap out the previous paint/selection behaviour of the
                // last-used-colour button for one bound to the new colour.
                btn_LastUsedColour[0].removeListener(SWT.Paint, btn_LastUsedColour[0].getListeners(SWT.Paint)[0]);
                btn_LastUsedColour[0].removeListener(SWT.Selection, btn_LastUsedColour[0].getListeners(SWT.Selection)[0]);
                final Color col = SWTResourceManager.getColor((int) (gColour2[0].getR() * 255f), (int) (gColour2[0].getG() * 255f), (int) (gColour2[0].getB() * 255f));
                final Point size = btn_LastUsedColour[0].computeSize(SWT.DEFAULT, SWT.DEFAULT);
                final int x = size.x / 4;
                final int y = size.y / 4;
                final int w = size.x / 2;
                final int h = size.y / 2;
                btn_LastUsedColour[0].addPaintListener(new PaintListener() {
                    @Override
                    public void paintControl(PaintEvent e) {
                        // Colour swatch with an opacity indicator icon.
                        e.gc.setBackground(col);
                        e.gc.fillRectangle(x, y, w, h);
                        if (gColour2[0].getA() == 1f) {
                            e.gc.drawImage(ResourceManager.getImage("icon16_transparent.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                        } else {
                            e.gc.drawImage(ResourceManager.getImage("icon16_halftrans.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                        }
                    }
                });
                btn_LastUsedColour[0].addSelectionListener(new SelectionListener() {
                    @Override
                    public void widgetSelected(SelectionEvent e) {
                        // Re-applies the chosen colour on later clicks.
                        if (Project.getFileToEdit() != null) {
                            int num = gColour2[0].getColourNumber();
                            if (!View.hasLDConfigColour(num)) {
                                num = -1;
                            }
                            Project.getFileToEdit().getVertexManager().colourChangeSelection(num, gColour2[0].getR(), gColour2[0].getG(), gColour2[0].getB(), gColour2[0].getA());
                        }
                    }
                    @Override
                    public void widgetDefaultSelected(SelectionEvent e) {
                    }
                });
                if (num != -1) {
                    btn_LastUsedColour[0].setToolTipText("Colour [" + num + "]: " + View.getLDConfigColourName(num)); //$NON-NLS-1$ //$NON-NLS-2$ I18N
                } else {
                    // Direct colour: encode as 0x2RRGGBB for the tooltip.
                    StringBuilder colourBuilder = new StringBuilder();
                    colourBuilder.append("0x2"); //$NON-NLS-1$
                    colourBuilder.append(MathHelper.toHex((int) (255f * gColour2[0].getR())).toUpperCase());
                    colourBuilder.append(MathHelper.toHex((int) (255f * gColour2[0].getG())).toUpperCase());
                    colourBuilder.append(MathHelper.toHex((int) (255f * gColour2[0].getB())).toUpperCase());
                    btn_LastUsedColour[0].setToolTipText("Colour [" + colourBuilder.toString() + "]"); //$NON-NLS-1$ //$NON-NLS-2$ I18N
                }
                btn_LastUsedColour[0].redraw();
            }
        }
    }
});
// --- Snap presets (coarse / medium / fine) ---------------------------------
// Each preset loads its stored move/rotate/scale snap values, records which
// preset is active in snapSize (2 = coarse, 1 = medium, 0 = fine), mirrors
// the values into the spinners and pushes them to the Manipulator.
btn_Coarse[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        BigDecimal m = WorkbenchManager.getUserSettingState().getCoarse_move_snap();
        BigDecimal r = WorkbenchManager.getUserSettingState().getCoarse_rotate_snap();
        BigDecimal s = WorkbenchManager.getUserSettingState().getCoarse_scale_snap();
        snapSize = 2;
        spn_Move[0].setValue(m);
        spn_Rotate[0].setValue(r);
        spn_Scale[0].setValue(s);
        Manipulator.setSnap(m, r, s);
    }
});
btn_Medium[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        BigDecimal m = WorkbenchManager.getUserSettingState().getMedium_move_snap();
        BigDecimal r = WorkbenchManager.getUserSettingState().getMedium_rotate_snap();
        BigDecimal s = WorkbenchManager.getUserSettingState().getMedium_scale_snap();
        snapSize = 1;
        spn_Move[0].setValue(m);
        spn_Rotate[0].setValue(r);
        spn_Scale[0].setValue(s);
        Manipulator.setSnap(m, r, s);
    }
});
btn_Fine[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        BigDecimal m = WorkbenchManager.getUserSettingState().getFine_move_snap();
        BigDecimal r = WorkbenchManager.getUserSettingState().getFine_rotate_snap();
        BigDecimal s = WorkbenchManager.getUserSettingState().getFine_scale_snap();
        snapSize = 0;
        spn_Move[0].setValue(m);
        spn_Rotate[0].setValue(r);
        spn_Scale[0].setValue(s);
        Manipulator.setSnap(m, r, s);
    }
});
// Splits selected quads into triangles (only for a writable, open file).
btn_SplitQuad[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null && !Project.getFileToEdit().isReadOnly()) {
            Project.getFileToEdit().getVertexManager().splitQuads(true);
        }
    }
});
// --- Snap value spinners ---------------------------------------------------
// Each spinner stores its edited value into the preset currently selected by
// snapSize (0 = fine, 2 = coarse, anything else = medium), re-reads the other
// two values from that preset, and forwards all three to the Manipulator.
spn_Move[0].addValueChangeListener(new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        BigDecimal m, r, s;
        m = spn.getValue();
        switch (snapSize) {
        case 0:
            WorkbenchManager.getUserSettingState().setFine_move_snap(m);
            r = WorkbenchManager.getUserSettingState().getFine_rotate_snap();
            s = WorkbenchManager.getUserSettingState().getFine_scale_snap();
            break;
        case 2:
            WorkbenchManager.getUserSettingState().setCoarse_move_snap(m);
            r = WorkbenchManager.getUserSettingState().getCoarse_rotate_snap();
            s = WorkbenchManager.getUserSettingState().getCoarse_scale_snap();
            break;
        default:
            WorkbenchManager.getUserSettingState().setMedium_move_snap(m);
            r = WorkbenchManager.getUserSettingState().getMedium_rotate_snap();
            s = WorkbenchManager.getUserSettingState().getMedium_scale_snap();
            break;
        }
        Manipulator.setSnap(m, r, s);
    }
});
spn_Rotate[0].addValueChangeListener(new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        BigDecimal m, r, s;
        r = spn.getValue();
        switch (snapSize) {
        case 0:
            m = WorkbenchManager.getUserSettingState().getFine_move_snap();
            WorkbenchManager.getUserSettingState().setFine_rotate_snap(r);
            s = WorkbenchManager.getUserSettingState().getFine_scale_snap();
            break;
        case 2:
            m = WorkbenchManager.getUserSettingState().getCoarse_move_snap();
            WorkbenchManager.getUserSettingState().setCoarse_rotate_snap(r);
            s = WorkbenchManager.getUserSettingState().getCoarse_scale_snap();
            break;
        default:
            m = WorkbenchManager.getUserSettingState().getMedium_move_snap();
            WorkbenchManager.getUserSettingState().setMedium_rotate_snap(r);
            s = WorkbenchManager.getUserSettingState().getMedium_scale_snap();
            break;
        }
        Manipulator.setSnap(m, r, s);
    }
});
spn_Scale[0].addValueChangeListener(new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        BigDecimal m, r, s;
        s = spn.getValue();
        switch (snapSize) {
        case 0:
            m = WorkbenchManager.getUserSettingState().getFine_move_snap();
            r = WorkbenchManager.getUserSettingState().getFine_rotate_snap();
            WorkbenchManager.getUserSettingState().setFine_scale_snap(s);
            break;
        case 2:
            m = WorkbenchManager.getUserSettingState().getCoarse_move_snap();
            r = WorkbenchManager.getUserSettingState().getCoarse_rotate_snap();
            WorkbenchManager.getUserSettingState().setCoarse_scale_snap(s);
            break;
        default:
            m = WorkbenchManager.getUserSettingState().getMedium_move_snap();
            r = WorkbenchManager.getUserSettingState().getMedium_rotate_snap();
            WorkbenchManager.getUserSettingState().setMedium_scale_snap(s);
            break;
        }
        Manipulator.setSnap(m, r, s);
    }
});
// Steps BACKWARDS through the selected data, skipping entries that live in
// subfiles or are not of line type 2..5, and fills the selection tab with the
// coordinates of the entry it lands on. updatingSelectionTab suppresses the
// spinner value-change listeners while the tab is being populated.
btn_PreviousSelection[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        updatingSelectionTab = true;
        NLogger.debug(getClass(), "Previous Selection..."); //$NON-NLS-1$
        final DatFile df = Project.getFileToEdit();
        if (df != null) {
            final VertexManager vm = df.getVertexManager();
            final int count = vm.getSelectedData().size();
            if (count > 0) {
                boolean breakIt = false;
                boolean firstRun = true;
                // Cycle at most once through the whole selection; breakIt stops
                // the loop after a full wrap-around without a usable entry.
                while (true) {
                    int index = vm.getSelectedItemIndex();
                    index--;
                    if (index < 0) {
                        index = count - 1;
                        if (!firstRun) breakIt = true;
                    }
                    firstRun = false;
                    vm.setSelectedItemIndex(index);
                    final GData gdata = (GData) vm.getSelectedData().toArray()[index];
                    if (vm.isNotInSubfileAndLinetype2to5(gdata)) {
                        vm.setSelectedLine(gdata);
                        disableSelectionTab();
                        updatingSelectionTab = true;
                        // Intentional fall-through: a type 5/4 entry also enables
                        // the fields of the lower point counts.
                        switch (gdata.type()) {
                        case 5:
                        case 4:
                            spn_SelectionX4[0].setEnabled(true);
                            spn_SelectionY4[0].setEnabled(true);
                            spn_SelectionZ4[0].setEnabled(true);
                        case 3:
                            spn_SelectionX3[0].setEnabled(true);
                            spn_SelectionY3[0].setEnabled(true);
                            spn_SelectionZ3[0].setEnabled(true);
                        case 2:
                            spn_SelectionX1[0].setEnabled(true);
                            spn_SelectionY1[0].setEnabled(true);
                            spn_SelectionZ1[0].setEnabled(true);
                            spn_SelectionX2[0].setEnabled(true);
                            spn_SelectionY2[0].setEnabled(true);
                            spn_SelectionZ2[0].setEnabled(true);
                            txt_Line[0].setText(gdata.toString());
                            breakIt = true;
                            // Inner switch copies the precise coordinates of the
                            // entry into the spinners (the coordinate count
                            // depends on the line type).
                            switch (gdata.type()) {
                            case 5:
                                BigDecimal[] g5 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g5[0]);
                                spn_SelectionY1[0].setValue(g5[1]);
                                spn_SelectionZ1[0].setValue(g5[2]);
                                spn_SelectionX2[0].setValue(g5[3]);
                                spn_SelectionY2[0].setValue(g5[4]);
                                spn_SelectionZ2[0].setValue(g5[5]);
                                spn_SelectionX3[0].setValue(g5[6]);
                                spn_SelectionY3[0].setValue(g5[7]);
                                spn_SelectionZ3[0].setValue(g5[8]);
                                spn_SelectionX4[0].setValue(g5[9]);
                                spn_SelectionY4[0].setValue(g5[10]);
                                spn_SelectionZ4[0].setValue(g5[11]);
                                break;
                            case 4:
                                BigDecimal[] g4 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g4[0]);
                                spn_SelectionY1[0].setValue(g4[1]);
                                spn_SelectionZ1[0].setValue(g4[2]);
                                spn_SelectionX2[0].setValue(g4[3]);
                                spn_SelectionY2[0].setValue(g4[4]);
                                spn_SelectionZ2[0].setValue(g4[5]);
                                spn_SelectionX3[0].setValue(g4[6]);
                                spn_SelectionY3[0].setValue(g4[7]);
                                spn_SelectionZ3[0].setValue(g4[8]);
                                spn_SelectionX4[0].setValue(g4[9]);
                                spn_SelectionY4[0].setValue(g4[10]);
                                spn_SelectionZ4[0].setValue(g4[11]);
                                break;
                            case 3:
                                BigDecimal[] g3 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g3[0]);
                                spn_SelectionY1[0].setValue(g3[1]);
                                spn_SelectionZ1[0].setValue(g3[2]);
                                spn_SelectionX2[0].setValue(g3[3]);
                                spn_SelectionY2[0].setValue(g3[4]);
                                spn_SelectionZ2[0].setValue(g3[5]);
                                spn_SelectionX3[0].setValue(g3[6]);
                                spn_SelectionY3[0].setValue(g3[7]);
                                spn_SelectionZ3[0].setValue(g3[8]);
                                break;
                            case 2:
                                BigDecimal[] g2 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g2[0]);
                                spn_SelectionY1[0].setValue(g2[1]);
                                spn_SelectionZ1[0].setValue(g2[2]);
                                spn_SelectionX2[0].setValue(g2[3]);
                                spn_SelectionY2[0].setValue(g2[4]);
                                spn_SelectionZ2[0].setValue(g2[5]);
                                break;
                            default:
                                disableSelectionTab();
                                updatingSelectionTab = true;
                                break;
                            }
                            // Reflect the current values in the field labels.
                            lbl_SelectionX1[0].setText(I18n.EDITOR3D_PositionX1 + " {" + spn_SelectionX1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY1[0].setText(I18n.EDITOR3D_PositionY1 + " {" + spn_SelectionY1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ1[0].setText(I18n.EDITOR3D_PositionZ1 + " {" + spn_SelectionZ1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX2[0].setText(I18n.EDITOR3D_PositionX2 + " {" + spn_SelectionX2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY2[0].setText(I18n.EDITOR3D_PositionY2 + " {" + spn_SelectionY2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ2[0].setText(I18n.EDITOR3D_PositionZ2 + " {" + spn_SelectionZ2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX3[0].setText(I18n.EDITOR3D_PositionX3 + " {" + spn_SelectionX3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY3[0].setText(I18n.EDITOR3D_PositionY3 + " {" + spn_SelectionY3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ3[0].setText(I18n.EDITOR3D_PositionZ3 + " {" + spn_SelectionZ3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX4[0].setText(I18n.EDITOR3D_PositionX4 + " {" + spn_SelectionX4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY4[0].setText(I18n.EDITOR3D_PositionY4 + " {" + spn_SelectionY4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ4[0].setText(I18n.EDITOR3D_PositionZ4 + " {" + spn_SelectionZ4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX1[0].getParent().layout();
                            // NOTE(review): the "Next Selection" handler does NOT
                            // clear updatingSelectionTab at this point — confirm
                            // whether this asymmetry is intentional.
                            updatingSelectionTab = false;
                            break;
                        default:
                            disableSelectionTab();
                            break;
                        }
                    } else {
                        disableSelectionTab();
                    }
                    if (breakIt) break;
                }
            } else {
                disableSelectionTab();
            }
        } else {
            disableSelectionTab();
        }
        updatingSelectionTab = false;
    }
});
// Steps FORWARDS through the selected data; mirror image of the
// "Previous Selection" handler above (index++ with wrap to 0).
btn_NextSelection[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        updatingSelectionTab = true;
        NLogger.debug(getClass(), "Next Selection..."); //$NON-NLS-1$
        final DatFile df = Project.getFileToEdit();
        if (df != null) {
            final VertexManager vm = df.getVertexManager();
            final int count = vm.getSelectedData().size();
            if (count > 0) {
                boolean breakIt = false;
                boolean firstRun = true;
                // Cycle at most once through the whole selection.
                while (true) {
                    int index = vm.getSelectedItemIndex();
                    index++;
                    if (index >= count) {
                        index = 0;
                        if (!firstRun) breakIt = true;
                    }
                    firstRun = false;
                    vm.setSelectedItemIndex(index);
                    final GData gdata = (GData) vm.getSelectedData().toArray()[index];
                    if (vm.isNotInSubfileAndLinetype2to5(gdata)) {
                        vm.setSelectedLine(gdata);
                        disableSelectionTab();
                        updatingSelectionTab = true;
                        // Intentional fall-through: type 5/4 also enables the
                        // fields of the lower point counts.
                        switch (gdata.type()) {
                        case 5:
                        case 4:
                            spn_SelectionX4[0].setEnabled(true);
                            spn_SelectionY4[0].setEnabled(true);
                            spn_SelectionZ4[0].setEnabled(true);
                        case 3:
                            spn_SelectionX3[0].setEnabled(true);
                            spn_SelectionY3[0].setEnabled(true);
                            spn_SelectionZ3[0].setEnabled(true);
                        case 2:
                            spn_SelectionX1[0].setEnabled(true);
                            spn_SelectionY1[0].setEnabled(true);
                            spn_SelectionZ1[0].setEnabled(true);
                            spn_SelectionX2[0].setEnabled(true);
                            spn_SelectionY2[0].setEnabled(true);
                            spn_SelectionZ2[0].setEnabled(true);
                            txt_Line[0].setText(gdata.toString());
                            breakIt = true;
                            // Copy the entry's precise coordinates into the spinners.
                            switch (gdata.type()) {
                            case 5:
                                BigDecimal[] g5 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g5[0]);
                                spn_SelectionY1[0].setValue(g5[1]);
                                spn_SelectionZ1[0].setValue(g5[2]);
                                spn_SelectionX2[0].setValue(g5[3]);
                                spn_SelectionY2[0].setValue(g5[4]);
                                spn_SelectionZ2[0].setValue(g5[5]);
                                spn_SelectionX3[0].setValue(g5[6]);
                                spn_SelectionY3[0].setValue(g5[7]);
                                spn_SelectionZ3[0].setValue(g5[8]);
                                spn_SelectionX4[0].setValue(g5[9]);
                                spn_SelectionY4[0].setValue(g5[10]);
                                spn_SelectionZ4[0].setValue(g5[11]);
                                break;
                            case 4:
                                BigDecimal[] g4 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g4[0]);
                                spn_SelectionY1[0].setValue(g4[1]);
                                spn_SelectionZ1[0].setValue(g4[2]);
                                spn_SelectionX2[0].setValue(g4[3]);
                                spn_SelectionY2[0].setValue(g4[4]);
                                spn_SelectionZ2[0].setValue(g4[5]);
                                spn_SelectionX3[0].setValue(g4[6]);
                                spn_SelectionY3[0].setValue(g4[7]);
                                spn_SelectionZ3[0].setValue(g4[8]);
                                spn_SelectionX4[0].setValue(g4[9]);
                                spn_SelectionY4[0].setValue(g4[10]);
                                spn_SelectionZ4[0].setValue(g4[11]);
                                break;
                            case 3:
                                BigDecimal[] g3 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g3[0]);
                                spn_SelectionY1[0].setValue(g3[1]);
                                spn_SelectionZ1[0].setValue(g3[2]);
                                spn_SelectionX2[0].setValue(g3[3]);
                                spn_SelectionY2[0].setValue(g3[4]);
                                spn_SelectionZ2[0].setValue(g3[5]);
                                spn_SelectionX3[0].setValue(g3[6]);
                                spn_SelectionY3[0].setValue(g3[7]);
                                spn_SelectionZ3[0].setValue(g3[8]);
                                break;
                            case 2:
                                BigDecimal[] g2 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g2[0]);
                                spn_SelectionY1[0].setValue(g2[1]);
                                spn_SelectionZ1[0].setValue(g2[2]);
                                spn_SelectionX2[0].setValue(g2[3]);
                                spn_SelectionY2[0].setValue(g2[4]);
                                spn_SelectionZ2[0].setValue(g2[5]);
                                break;
                            default:
                                disableSelectionTab();
                                updatingSelectionTab = true;
                                break;
                            }
                            // Reflect the current values in the field labels.
                            lbl_SelectionX1[0].setText(I18n.EDITOR3D_PositionX1 + " {" + spn_SelectionX1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY1[0].setText(I18n.EDITOR3D_PositionY1 + " {" + spn_SelectionY1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ1[0].setText(I18n.EDITOR3D_PositionZ1 + " {" + spn_SelectionZ1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX2[0].setText(I18n.EDITOR3D_PositionX2 + " {" + spn_SelectionX2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY2[0].setText(I18n.EDITOR3D_PositionY2 + " {" + spn_SelectionY2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ2[0].setText(I18n.EDITOR3D_PositionZ2 + " {" + spn_SelectionZ2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX3[0].setText(I18n.EDITOR3D_PositionX3 + " {" + spn_SelectionX3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY3[0].setText(I18n.EDITOR3D_PositionY3 + " {" + spn_SelectionY3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ3[0].setText(I18n.EDITOR3D_PositionZ3 + " {" + spn_SelectionZ3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX4[0].setText(I18n.EDITOR3D_PositionX4 + " {" + spn_SelectionX4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY4[0].setText(I18n.EDITOR3D_PositionY4 + " {" + spn_SelectionY4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ4[0].setText(I18n.EDITOR3D_PositionZ4 + " {" + spn_SelectionZ4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX1[0].getParent().layout();
                            // NOTE(review): unlike the "Previous Selection" handler,
                            // updatingSelectionTab is NOT reset here — confirm
                            // whether this asymmetry is intentional.
                            break;
                        default:
                            disableSelectionTab();
                            break;
                        }
                    } else {
                        disableSelectionTab();
                    }
                    if (breakIt) break;
                }
            } else {
                disableSelectionTab();
            }
        } else {
            disableSelectionTab();
        }
        updatingSelectionTab = false;
    }
});
// Shared listener for all twelve selection-tab spinners: pushes edited
// coordinates back into the currently selected line.
final ValueChangeAdapter va = new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        // Ignore events caused by the program itself refreshing the tab.
        if (updatingSelectionTab) return;
        final GData newLine = Project.getFileToEdit().getVertexManager().updateSelectedLine(
            spn_SelectionX1[0].getValue(), spn_SelectionY1[0].getValue(), spn_SelectionZ1[0].getValue(),
            spn_SelectionX2[0].getValue(), spn_SelectionY2[0].getValue(), spn_SelectionZ2[0].getValue(),
            spn_SelectionX3[0].getValue(), spn_SelectionY3[0].getValue(), spn_SelectionZ3[0].getValue(),
            spn_SelectionX4[0].getValue(), spn_SelectionY4[0].getValue(), spn_SelectionZ4[0].getValue(),
            btn_MoveAdjacentData2[0].getSelection()
        );
        if (newLine == null) {
            // The update could not be applied; reset the tab.
            disableSelectionTab();
        } else {
            txt_Line[0].setText(newLine.toString());
        }
    }
};
// Wire the shared coordinate-update listener to all twelve spinners.
spn_SelectionX1[0].addValueChangeListener(va);
spn_SelectionY1[0].addValueChangeListener(va);
spn_SelectionZ1[0].addValueChangeListener(va);
spn_SelectionX2[0].addValueChangeListener(va);
spn_SelectionY2[0].addValueChangeListener(va);
spn_SelectionZ2[0].addValueChangeListener(va);
spn_SelectionX3[0].addValueChangeListener(va);
spn_SelectionY3[0].addValueChangeListener(va);
spn_SelectionZ3[0].addValueChangeListener(va);
spn_SelectionX4[0].addValueChangeListener(va);
spn_SelectionY4[0].addValueChangeListener(va);
spn_SelectionZ4[0].addValueChangeListener(va);
// Selection handling for the parts tree is implemented via the
// SWT.MouseDown listener below (not via a SelectionAdapter).
treeParts[0].addListener(SWT.MouseDown, new Listener() {
@Override
public void handleEvent(Event event) {
if (event.button == MouseButton.RIGHT) {
NLogger.debug(getClass(), "Showing context menu."); //$NON-NLS-1$
try {
if (treeParts[0].getTree().getMenu() != null) {
treeParts[0].getTree().getMenu().dispose();
}
} catch (Exception ex) {}
Menu treeMenu = new Menu(treeParts[0].getTree());
treeParts[0].getTree().setMenu(treeMenu);
mnu_treeMenu[0] = treeMenu;
MenuItem mntmOpenIn3DEditor = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_OpenIn3DEditor[0] = mntmOpenIn3DEditor;
mntmOpenIn3DEditor.setEnabled(true);
mntmOpenIn3DEditor.setText("Open In 3D Editor"); //$NON-NLS-1$ I18N Needs translation!
MenuItem mntmOpenInTextEditor = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_OpenInTextEditor[0] = mntmOpenInTextEditor;
mntmOpenInTextEditor.setEnabled(true);
mntmOpenInTextEditor.setText("Open In Text Editor"); //$NON-NLS-1$ I18N Needs translation!
@SuppressWarnings("unused")
MenuItem mntm_Separator = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT() | SWT.SEPARATOR);
MenuItem mntmRename = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_Rename[0] = mntmRename;
mntmRename.setEnabled(true);
mntmRename.setText("Rename / Move"); //$NON-NLS-1$ I18N Needs translation!
MenuItem mntmRevert = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_Revert[0] = mntmRevert;
mntmRevert.setEnabled(true);
mntmRevert.setText("Revert All Changes"); //$NON-NLS-1$ I18N Needs translation!
MenuItem mntmDelete = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_Delete[0] = mntmDelete;
mntmDelete.setEnabled(true);
mntmDelete.setText("Delete"); //$NON-NLS-1$ I18N Needs translation!
@SuppressWarnings("unused")
MenuItem mntm_Separator2 = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT() | SWT.SEPARATOR);
MenuItem mntmCopyToUnofficial = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_CopyToUnofficial[0] = mntmCopyToUnofficial;
mntmCopyToUnofficial.setEnabled(true);
mntmCopyToUnofficial.setText("Copy To Unofficial Library"); //$NON-NLS-1$ I18N Needs translation!
// "Open In Text Editor": opens the selected DatFile in a text editor tab.
// If the file is already open in some EditorTextWindow, that tab is focused
// instead of opening a duplicate window.
mntm_OpenInTextEditor[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
// Look for an existing tab showing this file.
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
if (df.equals(((CompositeTab) t).getState().getFileNameObj())) {
w.getTabFolder().setSelection(t);
((CompositeTab) t).getControl().getShell().forceActive();
w.open();
df.getVertexManager().setUpdated(true);
return;
}
}
}
// Project.getParsedFiles().add(df); IS NECESSARY HERE
Project.getParsedFiles().add(df);
new EditorTextWindow().run(df);
} else {
// Nothing (or a non-file node) is selected.
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
cleanupClosedData();
}
});
// "Open In 3D Editor": shows the selected DatFile in a 3D view.
// If no renderer exists yet, a new CompositeContainer is created inside the
// main sash form (replacing the "%EMPTY%" placeholder); otherwise the file is
// loaded into every 3D view that is not locked to another file.
mntm_OpenIn3DEditor[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
if (renders.isEmpty()) {
// No 3D view open: replace the placeholder composite with a new 3D container,
// preserving the sash weights of the main layout.
if ("%EMPTY%".equals(Editor3DWindow.getSashForm().getChildren()[1].getData())) { //$NON-NLS-1$
int[] mainSashWeights = Editor3DWindow.getSashForm().getWeights();
Editor3DWindow.getSashForm().getChildren()[1].dispose();
CompositeContainer cmp_Container = new CompositeContainer(Editor3DWindow.getSashForm(), false);
cmp_Container.moveBelow(Editor3DWindow.getSashForm().getChildren()[0]);
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
df.parseForData();
Project.setFileToEdit(df);
cmp_Container.getComposite3D().setLockableDatFileReference(df);
Editor3DWindow.getSashForm().getParent().layout();
Editor3DWindow.getSashForm().setWeights(mainSashWeights);
}
} else {
// At least one 3D view exists: only proceed when some view is not
// locked to a specific file.
boolean canUpdate = false;
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (!c3d.isDatFileLockedOnDisplay()) {
canUpdate = true;
break;
}
}
if (canUpdate) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
final VertexManager vm = df.getVertexManager();
// Flush pending edits into the text before re-parsing.
if (vm.isModified()) {
df.setText(df.getText());
}
df.parseForData();
Project.setFileToEdit(df);
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (!c3d.isDatFileLockedOnDisplay()) {
c3d.setLockableDatFileReference(df);
vm.zoomToFit(c3d);
}
}
}
}
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
cleanupClosedData();
}
});
// "Revert All Changes": restores the selected DatFile to its last saved text
// after user confirmation, then refreshes any 3D view or text tab showing it.
mntm_Revert[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
// Nothing to revert for read-only files, files without unsaved changes,
// or empty virtual files.
if (df.isReadOnly() || !Project.getUnsavedFiles().contains(df) || df.isVirtual() && df.getText().trim().isEmpty()) return;
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.NO);
messageBox.setText(I18n.DIALOG_RevertTitle);
Object[] messageArguments = {df.getShortName(), df.getLastSavedOpened()};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_Revert);
messageBox.setMessage(formatter.format(messageArguments));
int result = messageBox.open();
if (result == SWT.NO) {
return;
}
// The display needs an update if the file is shown in a 3D view...
boolean canUpdate = false;
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(df)) {
canUpdate = true;
break;
}
}
// ...or in an open text editor tab (remember that tab to re-focus it later).
EditorTextWindow tmpW = null;
CTabItem tmpT = null;
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
if (df.equals(((CompositeTab) t).getState().getFileNameObj())) {
canUpdate = true;
tmpW = w;
tmpT = t;
break;
}
}
}
// Restore the original text and name, and clear the unsaved flag.
df.setText(df.getOriginalText());
df.setOldName(df.getNewName());
if (!df.isVirtual()) {
Project.removeUnsavedFile(df);
updateTree_unsavedEntries();
}
if (canUpdate) {
df.parseForData();
df.getVertexManager().setModified(true);
if (tmpW != null) {
tmpW.getTabFolder().setSelection(tmpT);
((CompositeTab) tmpT).getControl().getShell().forceActive();
tmpW.open();
((CompositeTab) tmpT).getTextComposite().forceFocus();
}
}
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
}
});
// "Delete": removes the selected DatFile from the tree and deletes it from disk.
// Read-only files are only detached from the project view (never deleted from
// disk); for writable files the user must confirm first.
mntm_Delete[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
            DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
            if (df.isReadOnly()) {
                // Read-only entries under the project node are just removed from the tree.
                if (treeParts[0].getSelection()[0].getParentItem().getParentItem() == treeItem_Project[0]) {
                    updateTree_removeEntry(df);
                    cleanupClosedData();
                }
                return;
            }
            // Confirm the deletion with the user.
            MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.YES | SWT.NO);
            messageBox.setText(I18n.DIALOG_DeleteTitle);
            Object[] messageArguments = {df.getShortName()};
            MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
            formatter.setLocale(View.LOCALE);
            formatter.applyPattern(I18n.DIALOG_Delete);
            messageBox.setMessage(formatter.format(messageArguments));
            int result = messageBox.open();
            if (result == SWT.NO) {
                return;
            }
            updateTree_removeEntry(df);
            // Delete the backing file. File.delete() signals failure via its
            // return value and can only throw SecurityException, so catch that
            // narrowly and log instead of silently swallowing every failure.
            try {
                File f = new File(df.getOldName());
                if (f.exists() && !f.delete()) {
                    NLogger.debug(getClass(), "Could not delete the file from disk."); //$NON-NLS-1$
                }
            } catch (SecurityException ex) {
                NLogger.debug(getClass(), "Could not delete the file from disk."); //$NON-NLS-1$
            }
            cleanupClosedData();
        } else {
            MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
            messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
            messageBoxError.setMessage(I18n.DIALOG_Unavailable);
            messageBoxError.open();
        }
    }
});
// "Rename / Move": two distinct behaviours depending on the selection:
//  - a DatFile node opens a save dialog and renames/moves the file,
//    keeping unsaved/parsed state consistent and updating open text tabs;
//  - the project root node renames/moves the whole project directory.
mntm_Rename[0].addSelectionListener(new SelectionAdapter() {
@SuppressWarnings("unchecked")
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
if (df.isReadOnly()) return;
FileDialog dlg = new FileDialog(Editor3DWindow.getWindow().getShell(), SWT.SAVE);
// Pre-select the file's current directory and name in the dialog.
File tmp = new File(df.getNewName());
dlg.setFilterPath(tmp.getAbsolutePath().substring(0, tmp.getAbsolutePath().length() - tmp.getName().length()));
dlg.setFileName(tmp.getName());
dlg.setFilterExtensions(new String[]{"*.dat"}); //$NON-NLS-1$
dlg.setOverwrite(true);
// Change the title bar text
dlg.setText(I18n.DIALOG_RenameOrMove);
// Calling open() will open and run the dialog.
// It will return the selected file, or
// null if user cancels
String newPath = dlg.open();
if (newPath != null) {
// Keep asking until the user picks a name that is not already taken
// (or cancels).
while (isFileNameAllocated(newPath, df, false)) {
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.RETRY | SWT.CANCEL);
messageBox.setText(I18n.DIALOG_AlreadyAllocatedNameTitle);
messageBox.setMessage(I18n.DIALOG_AlreadyAllocatedName);
int result = messageBox.open();
if (result == SWT.CANCEL) {
return;
}
newPath = dlg.open();
if (newPath == null) return;
}
// Warn when a project file is about to be moved outside the project folder.
if (df.isProjectFile() && !newPath.startsWith(Project.getProjectPath())) {
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.YES | SWT.NO);
messageBox.setText(I18n.DIALOG_NoProjectLocationTitle);
Object[] messageArguments = {new File(newPath).getName()};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_NoProjectLocation);
messageBox.setMessage(formatter.format(messageArguments));
int result = messageBox.open();
if (result == SWT.NO) {
return;
}
}
df.setNewName(newPath);
// A changed name marks the file as unsaved; an unchanged name with
// unchanged text clears the unsaved state again.
if (!df.getOldName().equals(df.getNewName())) {
if (!Project.getUnsavedFiles().contains(df)) {
df.parseForData();
df.getVertexManager().setModified(true);
Project.getUnsavedFiles().add(df);
}
} else {
if (df.getText().equals(df.getOriginalText()) && df.getOldName().equals(df.getNewName())) {
Project.removeUnsavedFile(df);
}
}
df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
// Refresh tab titles in all open text editor windows.
HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows());
for (EditorTextWindow win : windows) {
win.updateTabWithDatfile(df);
}
updateTree_renamedEntries();
updateTree_unsavedEntries();
}
} else if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].equals(treeItem_Project[0])) {
// The project root is selected: rename/move the whole project.
if (Project.isDefaultProject()) {
ProjectActions.createNewProject(Editor3DWindow.getWindow(), true);
} else {
int result = new NewProjectDialog(true).open();
if (result == IDialogConstants.OK_ID && !Project.getTempProjectPath().equals(Project.getProjectPath())) {
try {
// Re-prompt while the target directory already exists, unless the
// user explicitly chooses to overwrite it (YES) or cancels.
while (new File(Project.getTempProjectPath()).isDirectory()) {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.YES | SWT.CANCEL | SWT.NO);
messageBoxError.setText(I18n.PROJECT_ProjectOverwriteTitle);
messageBoxError.setMessage(I18n.PROJECT_ProjectOverwrite);
int result2 = messageBoxError.open();
if (result2 == SWT.CANCEL) {
return;
} else if (result2 == SWT.YES) {
break;
} else {
result = new NewProjectDialog(true).open();
if (result == IDialogConstants.CANCEL_ID) {
return;
}
}
}
Project.copyFolder(new File(Project.getProjectPath()), new File(Project.getTempProjectPath()));
Project.deleteFolder(new File(Project.getProjectPath()));
// Linked project parts need a new path, because they were copied to a new directory
String defaultPrefix = new File(Project.getProjectPath()).getAbsolutePath() + File.separator;
String projectPrefix = new File(Project.getTempProjectPath()).getAbsolutePath() + File.separator;
Editor3DWindow.getWindow().getProjectParts().getParentItem().setData(Project.getTempProjectPath());
HashSet<DatFile> projectFiles = new HashSet<DatFile>();
projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectParts().getData());
projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectSubparts().getData());
projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectPrimitives().getData());
projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectPrimitives48().getData());
// Rewrite the old path prefix to the new one for every project file,
// preserving each file's unsaved/parsed status across the re-registration.
for (DatFile df : projectFiles) {
boolean isUnsaved = Project.getUnsavedFiles().contains(df);
boolean isParsed = Project.getParsedFiles().contains(df);
Project.getParsedFiles().remove(df);
Project.getUnsavedFiles().remove(df);
String newName = df.getNewName();
String oldName = df.getOldName();
df.updateLastModified();
if (!newName.startsWith(projectPrefix) && newName.startsWith(defaultPrefix)) {
df.setNewName(projectPrefix + newName.substring(defaultPrefix.length()));
}
if (!oldName.startsWith(projectPrefix) && oldName.startsWith(defaultPrefix)) {
df.setOldName(projectPrefix + oldName.substring(defaultPrefix.length()));
}
df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
if (isUnsaved) Project.addUnsavedFile(df);
if (isParsed) Project.getParsedFiles().add(df);
}
Project.setProjectName(Project.getTempProjectName());
Project.setProjectPath(Project.getTempProjectPath());
Editor3DWindow.getWindow().getProjectParts().getParentItem().setText(Project.getProjectName());
updateTree_unsavedEntries();
Project.updateEditor();
Editor3DWindow.getWindow().getShell().update();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
}
}
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
}
});
// "Copy To Unofficial Library": copies the selected DatFile from the project
// or official library into the matching folder of the unofficial library,
// optionally together with its required/related references.
mntm_CopyToUnofficial[0] .addSelectionListener(new SelectionAdapter() {
@SuppressWarnings("unchecked")
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
TreeItem p = treeParts[0].getSelection()[0].getParentItem();
// Both upper- and lowercase folder spellings are probed, since the
// library layout on disk may use either (e.g. "PARTS" vs. "parts").
String targetPath_u;
String targetPath_l;
String targetPathDir_u;
String targetPathDir_l;
TreeItem targetTreeItem;
boolean projectIsFileOrigin = false;
// Map the source tree category to the corresponding unofficial-library folder.
if (treeItem_ProjectParts[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"; //$NON-NLS-1$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"; //$NON-NLS-1$
targetTreeItem = treeItem_UnofficialParts[0];
projectIsFileOrigin = true;
} else if (treeItem_ProjectPrimitives[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P"; //$NON-NLS-1$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p"; //$NON-NLS-1$
targetTreeItem = treeItem_UnofficialPrimitives[0];
projectIsFileOrigin = true;
} else if (treeItem_ProjectPrimitives48[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
targetTreeItem = treeItem_UnofficialPrimitives48[0];
projectIsFileOrigin = true;
} else if (treeItem_ProjectSubparts[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"+ File.separator + "S"; //$NON-NLS-1$ //$NON-NLS-2$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"+ File.separator + "s"; //$NON-NLS-1$ //$NON-NLS-2$
targetTreeItem = treeItem_UnofficialSubparts[0];
projectIsFileOrigin = true;
} else if (treeItem_OfficialParts[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"; //$NON-NLS-1$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"; //$NON-NLS-1$
targetTreeItem = treeItem_UnofficialParts[0];
} else if (treeItem_OfficialPrimitives[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P"; //$NON-NLS-1$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p"; //$NON-NLS-1$
targetTreeItem = treeItem_UnofficialPrimitives[0];
} else if (treeItem_OfficialPrimitives48[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
targetTreeItem = treeItem_UnofficialPrimitives48[0];
} else if (treeItem_OfficialSubparts[0].equals(p)) {
targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"+ File.separator + "S"; //$NON-NLS-1$ //$NON-NLS-2$
targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"+ File.separator + "s"; //$NON-NLS-1$ //$NON-NLS-2$
targetTreeItem = treeItem_UnofficialSubparts[0];
} else {
// Selected node is not in a copyable category.
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
return;
}
targetPathDir_l = targetPath_l;
targetPathDir_u = targetPath_u;
final String newName = new File(df.getNewName()).getName();
targetPath_u = targetPath_u + File.separator + newName;
targetPath_l = targetPath_l + File.separator + newName;
// Check whether the unofficial library already contains a file with the
// target name (in any of the four unofficial categories).
DatFile fileToOverwrite_u = new DatFile(targetPath_u);
DatFile fileToOverwrite_l = new DatFile(targetPath_l);
DatFile targetFile = null;
TreeItem[] folders = new TreeItem[4];
folders[0] = treeItem_UnofficialParts[0];
folders[1] = treeItem_UnofficialPrimitives[0];
folders[2] = treeItem_UnofficialPrimitives48[0];
folders[3] = treeItem_UnofficialSubparts[0];
for (TreeItem folder : folders) {
ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
for (DatFile d : cachedReferences) {
if (fileToOverwrite_u.equals(d) || fileToOverwrite_l.equals(d)) {
targetFile = d;
break;
}
}
}
// Ask before overwriting an existing target file.
if (new File(targetPath_u).exists() || new File(targetPath_l).exists() || targetFile != null) {
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.OK | SWT.CANCEL);
messageBox.setText(I18n.DIALOG_ReplaceTitle);
Object[] messageArguments = {newName};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_Replace);
messageBox.setMessage(formatter.format(messageArguments));
int result = messageBox.open();
if (result == SWT.CANCEL) {
return;
}
}
ArrayList<ArrayList<DatFile>> refResult = null;
if (new File(targetPathDir_l).exists() || new File(targetPathDir_u).exists()) {
if (targetFile == null) {
// No existing target: ask how much to copy.
// CopyDialog results: OK = file only, NO = file + required + related,
// YES = file + required references.
int result = new CopyDialog(getShell(), new File(df.getNewName()).getName()).open();
switch (result) {
case IDialogConstants.OK_ID:
// Copy File Only
break;
case IDialogConstants.NO_ID:
// Copy File and required and related
if (projectIsFileOrigin) {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
} else {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
}
break;
case IDialogConstants.YES_ID:
// Copy File and required
if (projectIsFileOrigin) {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
} else {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
}
break;
default:
return;
}
DatFile newDatFile = new DatFile(new File(targetPathDir_l).exists() ? targetPath_l : targetPath_u);
// Text exchange includes description exchange
newDatFile.setText(df.getText());
newDatFile.saveForced();
newDatFile.setType(df.getType());
((ArrayList<DatFile>) targetTreeItem.getData()).add(newDatFile);
TreeItem ti = new TreeItem(targetTreeItem, SWT.NONE);
ti.setText(new File(df.getNewName()).getName());
ti.setData(newDatFile);
} else if (targetFile.equals(df)) { // This can only happen if the user opens the unofficial parts folder as a project
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBox.setText(I18n.DIALOG_AlreadyAllocatedNameTitle);
messageBox.setMessage(I18n.DIALOG_AlreadyAllocatedName);
messageBox.open();
return;
} else {
// Existing target is replaced: dispose its data first, then copy.
int result = new CopyDialog(getShell(), new File(df.getNewName()).getName()).open();
switch (result) {
case IDialogConstants.OK_ID:
// Copy File Only
break;
case IDialogConstants.NO_ID:
// Copy File and required and related
if (projectIsFileOrigin) {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
} else {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
}
break;
case IDialogConstants.YES_ID:
// Copy File and required
if (projectIsFileOrigin) {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
} else {
refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
}
break;
default:
return;
}
targetFile.disposeData();
updateTree_removeEntry(targetFile);
DatFile newDatFile = new DatFile(new File(targetPathDir_l).exists() ? targetPath_l : targetPath_u);
newDatFile.setText(df.getText());
newDatFile.saveForced();
((ArrayList<DatFile>) targetTreeItem.getData()).add(newDatFile);
TreeItem ti = new TreeItem(targetTreeItem, SWT.NONE);
ti.setText(new File(df.getNewName()).getName());
ti.setData(newDatFile);
}
// Apply the reference copy result: the code below reads indices 0-3 as
// files to remove and 4-7 as files to create, one index per category
// (parts, subparts, primitives, hi-res primitives) — confirm against
// ReferenceParser.checkForReferences.
if (refResult != null) {
// Remove old data
for(int i = 0; i < 4; i++) {
ArrayList<DatFile> toRemove = refResult.get(i);
for (DatFile datToRemove : toRemove) {
datToRemove.disposeData();
updateTree_removeEntry(datToRemove);
}
}
// Create new data
TreeItem[] targetTrees = new TreeItem[]{treeItem_UnofficialParts[0], treeItem_UnofficialSubparts[0], treeItem_UnofficialPrimitives[0], treeItem_UnofficialPrimitives48[0]};
for(int i = 4; i < 8; i++) {
ArrayList<DatFile> toCreate = refResult.get(i);
for (DatFile datToCreate : toCreate) {
DatFile newDatFile = new DatFile(datToCreate.getOldName());
String source = datToCreate.getTextDirect();
newDatFile.setText(source);
newDatFile.setOriginalText(source);
newDatFile.saveForced();
newDatFile.setType(datToCreate.getType());
((ArrayList<DatFile>) targetTrees[i - 4].getData()).add(newDatFile);
TreeItem ti = new TreeItem(targetTrees[i - 4], SWT.NONE);
ti.setText(new File(datToCreate.getOldName()).getName());
ti.setData(newDatFile);
}
}
}
updateTree_unsavedEntries();
}
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
}
});
// Show the context menu at the current mouse position. AWT's MouseInfo is
// used here to obtain absolute screen coordinates — presumably because the
// triggering SWT event does not carry them; verify against the caller.
java.awt.Point b = java.awt.MouseInfo.getPointerInfo().getLocation();
final int x = (int) b.getX();
final int y = (int) b.getY();
Menu menu = mnu_treeMenu[0];
menu.setLocation(x, y);
menu.setVisible(true);
}
}
});
// Double-click on a tree node toggles its expanded/visible state.
// The tree rebuild is deferred to the UI thread via asyncExec; afterwards the
// clicked item is re-selected so the selection survives the rebuild.
treeParts[0].addListener(SWT.MouseDoubleClick, new Listener() {
@Override
public void handleEvent(Event event) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null) {
treeParts[0].getSelection()[0].setVisible(!treeParts[0].getSelection()[0].isVisible());
TreeItem sel = treeParts[0].getSelection()[0];
sh.getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
treeParts[0].build();
}
});
treeParts[0].redraw();
treeParts[0].update();
treeParts[0].getTree().select(treeParts[0].getMapInv().get(sel));
}
}
});
// Live part search: re-run the search on every text change.
txt_Search[0].addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
search(txt_Search[0].getText());
}
});
// Clearing the search text implicitly resets the search via the
// ModifyListener above.
btn_ResetSearch[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
txt_Search[0].setText(""); //$NON-NLS-1$
}
});
// Hide the current selection in the active file (no-op when no file is open).
btn_Hide[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().hideSelection();
}
});
// Show everything that was hidden in the active file.
btn_ShowAll[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().showAll();
}
});
// Toggle "no transparent selection" mode.
btn_NoTransparentSelection[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
setNoTransparentSelection(btn_NoTransparentSelection[0].getSelection());
}
});
// Toggle BFC (back-face culling) mode.
btn_BFCToggle[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
setBfcToggle(btn_BFCToggle[0].getSelection());
}
});
// Delete the current selection; honours the "move adjacent data" setting.
btn_Delete[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().delete(Editor3DWindow.getWindow().isMovingAdjacentData(), true);
}
});
// Clipboard: copy the current selection.
btn_Copy[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().copy();
}
});
// Clipboard: cut = copy followed by delete (without moving adjacent data).
btn_Cut[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
Project.getFileToEdit().getVertexManager().copy();
Project.getFileToEdit().getVertexManager().delete(false, true);
}
}
});
// Clipboard: paste into the active file.
btn_Paste[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().paste();
}
});
// "Manipulator to origin": fully resets the manipulator in every 3D view
// that shows the active file.
btn_Manipulator_0_toOrigin[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
c3d.getManipulator().reset();
}
}
}
}
});
// "Manipulator to world": aligns the manipulator axes with the world axes
// while keeping its current position (both the float position and the
// accurate BigDecimal position are preserved across the reset).
btn_Manipulator_XIII_toWorld[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            for (OpenGLRenderer renderer : renders) {
                Composite3D c3d = renderer.getC3D();
                if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                    // Remember the position, reset the orientation, restore the position.
                    Vector4f t = new Vector4f(c3d.getManipulator().getPosition());
                    BigDecimal[] T = c3d.getManipulator().getAccuratePosition();
                    c3d.getManipulator().reset();
                    c3d.getManipulator().getPosition().set(t);
                    c3d.getManipulator().setAccuratePosition(T[0], T[1], T[2]);
                }
            }
        }
    }
});
// Reverse the manipulator X axis in every 3D view showing the active file.
// The float axis is negated via Vector4f.sub(zero, axis, axis) and the
// accurate BigDecimal axis is negated component-wise to stay in sync.
btn_Manipulator_X_XReverse[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f.sub(new Vector4f(0f, 0f, 0f, 2f), c3d.getManipulator().getXaxis(), c3d.getManipulator().getXaxis());
BigDecimal[] a = c3d.getManipulator().getAccurateXaxis();
c3d.getManipulator().setAccurateXaxis(a[0].negate(), a[1].negate(), a[2].negate());
}
}
}
});
// Reverse the manipulator Y axis (same scheme as the X axis above).
btn_Manipulator_XI_YReverse[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f.sub(new Vector4f(0f, 0f, 0f, 2f), c3d.getManipulator().getYaxis(), c3d.getManipulator().getYaxis());
BigDecimal[] a = c3d.getManipulator().getAccurateYaxis();
c3d.getManipulator().setAccurateYaxis(a[0].negate(), a[1].negate(), a[2].negate());
}
}
}
});
// Reverse the manipulator Z axis (same scheme as the X axis above).
btn_Manipulator_XII_ZReverse[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f.sub(new Vector4f(0f, 0f, 0f, 2f), c3d.getManipulator().getZaxis(), c3d.getManipulator().getZaxis());
BigDecimal[] a = c3d.getManipulator().getAccurateZaxis();
c3d.getManipulator().setAccurateZaxis(a[0].negate(), a[1].negate(), a[2].negate());
}
}
}
});
// Swap the manipulator X and Y axes (float and accurate representations).
btn_Manipulator_SwitchXY[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f temp = new Vector4f(c3d.getManipulator().getXaxis());
c3d.getManipulator().getXaxis().set(c3d.getManipulator().getYaxis());
c3d.getManipulator().getYaxis().set(temp);
// clone() prevents aliasing when the arrays are written back below.
BigDecimal[] a = c3d.getManipulator().getAccurateXaxis().clone();
BigDecimal[] b = c3d.getManipulator().getAccurateYaxis().clone();
c3d.getManipulator().setAccurateXaxis(b[0], b[1], b[2]);
c3d.getManipulator().setAccurateYaxis(a[0], a[1], a[2]);
}
}
}
});
// Swap the manipulator X and Z axes.
btn_Manipulator_SwitchXZ[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f temp = new Vector4f(c3d.getManipulator().getXaxis());
c3d.getManipulator().getXaxis().set(c3d.getManipulator().getZaxis());
c3d.getManipulator().getZaxis().set(temp);
BigDecimal[] a = c3d.getManipulator().getAccurateXaxis().clone();
BigDecimal[] b = c3d.getManipulator().getAccurateZaxis().clone();
c3d.getManipulator().setAccurateXaxis(b[0], b[1], b[2]);
c3d.getManipulator().setAccurateZaxis(a[0], a[1], a[2]);
}
}
}
});
// Swap the manipulator Y and Z axes.
btn_Manipulator_SwitchYZ[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f temp = new Vector4f(c3d.getManipulator().getZaxis());
c3d.getManipulator().getZaxis().set(c3d.getManipulator().getYaxis());
c3d.getManipulator().getYaxis().set(temp);
BigDecimal[] a = c3d.getManipulator().getAccurateYaxis().clone();
BigDecimal[] b = c3d.getManipulator().getAccurateZaxis().clone();
c3d.getManipulator().setAccurateYaxis(b[0], b[1], b[2]);
c3d.getManipulator().setAccurateZaxis(a[0], a[1], a[2]);
}
}
}
});
// "Camera to manipulator": aligns each matching 3D view's camera with the
// manipulator by building a rotation matrix from the manipulator axes
// (axes become matrix columns) and translating to the negated position.
btn_Manipulator_1_cameraToPos[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
Vector4f pos = c3d.getManipulator().getPosition();
Vector4f a1 = c3d.getManipulator().getXaxis();
Vector4f a2 = c3d.getManipulator().getYaxis();
Vector4f a3 = c3d.getManipulator().getZaxis();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Matrix4f rot = new Matrix4f();
Matrix4f.setIdentity(rot);
rot.m00 = a1.x;
rot.m10 = a1.y;
rot.m20 = a1.z;
rot.m01 = a2.x;
rot.m11 = a2.y;
rot.m21 = a2.z;
rot.m02 = a3.x;
rot.m12 = a3.y;
rot.m22 = a3.z;
c3d.getRotation().load(rot);
Matrix4f trans = new Matrix4f();
Matrix4f.setIdentity(trans);
trans.translate(new Vector3f(-pos.x, -pos.y, -pos.z));
c3d.getTranslation().load(trans);
c3d.getPerspectiveCalculator().calculateOriginData();
}
}
}
});
// "Manipulator to average": moves the manipulator to the selection center
// in every 3D view showing the active file.
// NOTE(review): the accurate position is derived via new BigDecimal(float/1000f),
// which inherits binary-float rounding error; presumably the /1000 converts
// display units to LDraw units — confirm the intended precision.
btn_Manipulator_2_toAverage[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
Vector4f avg = Project.getFileToEdit().getVertexManager().getSelectionCenter();
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
c3d.getManipulator().getPosition().set(avg.x, avg.y, avg.z, 1f);
c3d.getManipulator().setAccuratePosition(new BigDecimal(avg.x / 1000f), new BigDecimal(avg.y / 1000f), new BigDecimal(avg.z / 1000f));
}
}
}
}
});
// "Manipulator to subfile": aligns the manipulator with the first selected
// subfile — position from the subfile's product matrix translation, axes from
// its normalised rotation rows. Both float and accurate values are updated.
btn_Manipulator_3_toSubfile[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
Set<GData1> subfiles = Project.getFileToEdit().getVertexManager().getSelectedSubfiles();
if (!subfiles.isEmpty()) {
// Take an arbitrary (first iterated) subfile from the selection.
GData1 subfile = null;
for (GData1 g1 : subfiles) {
subfile = g1;
break;
}
Matrix4f m = subfile.getProductMatrix();
Matrix M = subfile.getAccurateProductMatrix();
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
c3d.getManipulator().getPosition().set(m.m30, m.m31, m.m32, 1f);
c3d.getManipulator().setAccuratePosition(M.M30, M.M31, M.M32);
// Normalise each axis before applying it to the manipulator.
Vector3f x = new Vector3f(m.m00, m.m01, m.m02);
x.normalise();
Vector3f y = new Vector3f(m.m10, m.m11, m.m12);
y.normalise();
Vector3f z = new Vector3f(m.m20, m.m21, m.m22);
z.normalise();
c3d.getManipulator().getXaxis().set(x.x, x.y, x.z, 1f);
c3d.getManipulator().getYaxis().set(y.x, y.y, y.z, 1f);
c3d.getManipulator().getZaxis().set(z.x, z.y, z.z, 1f);
// The accurate axes are rebuilt from the normalised floats here
// (not from the accurate matrix), so they carry float precision only.
c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
new BigDecimal(c3d.getManipulator().getXaxis().z));
c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
new BigDecimal(c3d.getManipulator().getYaxis().z));
c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
new BigDecimal(c3d.getManipulator().getZaxis().z));
}
}
}
}
}
});
// Transforms the first selected subfile (that still has linked geometry)
// so that it matches the current manipulator matrix.
btn_Manipulator_32_subfileTo[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() == null) {
            return;
        }
        VertexManager vm = Project.getFileToEdit().getVertexManager();
        // Pick the first selected subfile that is still linked to vertices.
        GData1 target = null;
        for (GData1 g1 : vm.getSelectedSubfiles()) {
            if (vm.getLineLinkedToVertices().containsKey(g1)) {
                target = g1;
                break;
            }
        }
        if (target == null) {
            return;
        }
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                vm.transformSubfile(target, c3d.getManipulator().getAccurateMatrix(), true, true);
            }
        }
    }
});
// Snaps the manipulator to the vertex nearest to its current position.
// Searches the selected vertices if any are selected, otherwise all vertices.
btn_Manipulator_4_toVertex[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (!c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                continue;
            }
            VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
            Set<Vertex> candidates = vm.getSelectedVertices().isEmpty() ? vm.getVertices() : vm.getSelectedVertices();
            Vector4f manipulatorPos = new Vector4f(c3d.getManipulator().getPosition());
            Vector4f nearest = new Vector4f(c3d.getManipulator().getPosition());
            Vertex nearestVertex = new Vertex(0f, 0f, 0f);
            // Squared distances are sufficient for the comparison (no sqrt needed).
            float bestDistSq = Float.MAX_VALUE;
            for (Vertex vertex : candidates) {
                float distSq = Vector4f.sub(manipulatorPos, vertex.toVector4f(), null).lengthSquared();
                if (distSq < bestDistSq) {
                    bestDistSq = distSq;
                    nearestVertex = vertex;
                    nearest = vertex.toVector4f();
                }
            }
            c3d.getManipulator().getPosition().set(nearest.x, nearest.y, nearest.z, 1f);
            c3d.getManipulator().setAccuratePosition(nearestVertex.X, nearestVertex.Y, nearestVertex.Z);
        }
    }
});
// Snaps the manipulator to the point on the model's lines that is nearest
// to its current position.
// FIX: removed a dead store — 'min' was initialised from the manipulator
// position and then immediately overwritten by the distance query.
btn_Manipulator_5_toEdge[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                Vector4f min = vm.getMinimalDistanceVertexToLines(new Vertex(c3d.getManipulator().getPosition())).toVector4f();
                c3d.getManipulator().getPosition().set(min.x, min.y, min.z, 1f);
                // NOTE(review): BigDecimal(double) keeps the binary float inaccuracy of min/1000f.
                c3d.getManipulator().setAccuratePosition(new BigDecimal(min.x / 1000f), new BigDecimal(min.y / 1000f), new BigDecimal(min.z / 1000f));
            }
        }
    }
});
// Snaps the manipulator to the point on the model's surfaces that is nearest
// to its current position.
// FIX: removed a dead store — 'min' was initialised from the manipulator
// position and then immediately overwritten by the distance query.
btn_Manipulator_6_toSurface[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                Vector4f min = vm.getMinimalDistanceVertexToSurfaces(new Vertex(c3d.getManipulator().getPosition())).toVector4f();
                c3d.getManipulator().getPosition().set(min.x, min.y, min.z, 1f);
                // NOTE(review): BigDecimal(double) keeps the binary float inaccuracy of min/1000f.
                c3d.getManipulator().setAccuratePosition(new BigDecimal(min.x / 1000f), new BigDecimal(min.y / 1000f), new BigDecimal(min.z / 1000f));
            }
        }
    }
});
// Orients the manipulator so that its Z axis matches the normal of the vertex
// nearest to the manipulator position; X and Y are derived via cross products
// with an auxiliary vector that is guaranteed not to be parallel to the normal.
btn_Manipulator_7_toVertexNormal[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                float minDist = Float.MAX_VALUE;
                Vector4f next = new Vector4f(c3d.getManipulator().getPosition());
                // NOTE(review): 'min' stays null when the vertex set is empty;
                // vm.getVertexNormal(null) below would then likely NPE — confirm.
                Vertex min = null;
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                Set<Vertex> vertices;
                // Prefer the selection; fall back to all vertices.
                if (vm.getSelectedVertices().isEmpty()) {
                    vertices = vm.getVertices();
                } else {
                    vertices = vm.getSelectedVertices();
                }
                // Nearest-vertex search by squared distance.
                for (Vertex vertex : vertices) {
                    Vector4f sub = Vector4f.sub(next, vertex.toVector4f(), null);
                    float d2 = sub.lengthSquared();
                    if (d2 < minDist) {
                        minDist = d2;
                        min = vertex;
                    }
                }
                vm = c3d.getLockableDatFileReference().getVertexManager(); // re-fetch (same reference)
                Vector4f n = vm.getVertexNormal(min);
                // Auxiliary vector (tx, ty, tz), flipped by the sign of n.x.
                float tx = 1f;
                float ty = 0f;
                float tz = 0f;
                if (n.x <= 0f) {
                    tx = -1;
                }
                // NOTE(review): the first and third conditions are identical
                // (both cross n with (0, 0, tx)), so the third branch is
                // unreachable — the third was presumably meant to use (0, tx, 0).
                if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                    tz = tx;
                    tx = 0f;
                    ty = 0f;
                } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, 0f, 0f), null).length()) > .00001f) {
                    // ty = 0f;
                    // tz = 0f;
                } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                    ty = tx;
                    tx = 0f;
                    tz = 0f;
                } else {
                    // No usable auxiliary vector found: leave the manipulator unchanged.
                    return;
                }
                // X axis = normalised (n × aux); Y axis = X × Z.
                Vector3f cross = (Vector3f) Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, ty, tz), null).normalise();
                c3d.getManipulator().getZaxis().set(n.x, n.y, n.z, 1f);
                c3d.getManipulator().getXaxis().set(cross.x, cross.y, cross.z, 1f);
                Vector4f zaxis = c3d.getManipulator().getZaxis();
                Vector4f xaxis = c3d.getManipulator().getXaxis();
                cross = Vector3f.cross(new Vector3f(xaxis.x, xaxis.y, xaxis.z), new Vector3f(zaxis.x, zaxis.y, zaxis.z), null);
                c3d.getManipulator().getYaxis().set(cross.x, cross.y, cross.z, 1f);
                // Accurate axes are rebuilt from the float axes (float precision only).
                c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
                        new BigDecimal(c3d.getManipulator().getXaxis().z));
                c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
                        new BigDecimal(c3d.getManipulator().getYaxis().z));
                c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
                        new BigDecimal(c3d.getManipulator().getZaxis().z));
            }
        }
    }
});
// Orients the manipulator so that its Z axis matches the normal of the edge
// nearest to the manipulator position (same axis-derivation scheme as the
// vertex-normal handler above).
btn_Manipulator_8_toEdgeNormal[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                Vector4f n = vm.getMinimalDistanceEdgeNormal(new Vertex(c3d.getManipulator().getPosition()));
                // Auxiliary vector (tx, ty, tz), flipped by the sign of n.x.
                float tx = 1f;
                float ty = 0f;
                float tz = 0f;
                if (n.x <= 0f) {
                    tx = -1;
                }
                // NOTE(review): first and third conditions are identical (both
                // cross n with (0, 0, tx)); the third branch is unreachable.
                if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                    tz = tx;
                    tx = 0f;
                    ty = 0f;
                } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, 0f, 0f), null).length()) > .00001f) {
                    // ty = 0f;
                    // tz = 0f;
                } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                    ty = tx;
                    tx = 0f;
                    tz = 0f;
                } else {
                    // No usable auxiliary vector found: leave the manipulator unchanged.
                    return;
                }
                // X axis = normalised (n × aux); Y axis = X × Z.
                Vector3f cross = (Vector3f) Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, ty, tz), null).normalise();
                c3d.getManipulator().getZaxis().set(n.x, n.y, n.z, 1f);
                c3d.getManipulator().getXaxis().set(cross.x, cross.y, cross.z, 1f);
                Vector4f zaxis = c3d.getManipulator().getZaxis();
                Vector4f xaxis = c3d.getManipulator().getXaxis();
                cross = Vector3f.cross(new Vector3f(xaxis.x, xaxis.y, xaxis.z), new Vector3f(zaxis.x, zaxis.y, zaxis.z), null);
                c3d.getManipulator().getYaxis().set(cross.x, cross.y, cross.z, 1f);
                // Accurate axes are rebuilt from the float axes (float precision only).
                c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
                        new BigDecimal(c3d.getManipulator().getXaxis().z));
                c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
                        new BigDecimal(c3d.getManipulator().getYaxis().z));
                c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
                        new BigDecimal(c3d.getManipulator().getZaxis().z));
            }
        }
    }
});
// Orients the manipulator so that its Z axis matches the normal of the surface
// nearest to the manipulator position (same axis-derivation scheme as the
// vertex/edge-normal handlers above).
btn_Manipulator_9_toSurfaceNormal[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                Vector4f n = vm.getMinimalDistanceSurfaceNormal(new Vertex(c3d.getManipulator().getPosition()));
                // Auxiliary vector (tx, ty, tz), flipped by the sign of n.x.
                float tx = 1f;
                float ty = 0f;
                float tz = 0f;
                if (n.x <= 0f) {
                    tx = -1;
                }
                // NOTE(review): first and third conditions are identical (both
                // cross n with (0, 0, tx)); the third branch is unreachable.
                if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                    tz = tx;
                    tx = 0f;
                    ty = 0f;
                } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, 0f, 0f), null).length()) > .00001f) {
                    // ty = 0f;
                    // tz = 0f;
                } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                    ty = tx;
                    tx = 0f;
                    tz = 0f;
                } else {
                    // No usable auxiliary vector found: leave the manipulator unchanged.
                    return;
                }
                // X axis = normalised (n × aux); Y axis = X × Z.
                Vector3f cross = (Vector3f) Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, ty, tz), null).normalise();
                c3d.getManipulator().getZaxis().set(n.x, n.y, n.z, 1f);
                c3d.getManipulator().getXaxis().set(cross.x, cross.y, cross.z, 1f);
                Vector4f zaxis = c3d.getManipulator().getZaxis();
                Vector4f xaxis = c3d.getManipulator().getXaxis();
                cross = Vector3f.cross(new Vector3f(xaxis.x, xaxis.y, xaxis.z), new Vector3f(zaxis.x, zaxis.y, zaxis.z), null);
                c3d.getManipulator().getYaxis().set(cross.x, cross.y, cross.z, 1f);
                // Accurate axes are rebuilt from the float axes (float precision only).
                c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
                        new BigDecimal(c3d.getManipulator().getXaxis().z));
                c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
                        new BigDecimal(c3d.getManipulator().getYaxis().z));
                c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
                        new BigDecimal(c3d.getManipulator().getZaxis().z));
            }
        }
    }
});
// Re-centres the rotation pivot of every 3D view that shows the edited file.
btn_Manipulator_XIV_adjustRotationCenter[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (!c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                continue;
            }
            c3d.getLockableDatFileReference().getVertexManager().adjustRotationCenter(c3d, null);
        }
    }
});
// --- Selection menu: basic select actions --------------------------------
// Each handler works on the VertexManager of the first 3D view that shows the
// file currently being edited and returns afterwards.

// Select everything matching the selector filter; the boolean differs from the
// "visible" variant below — presumably it includes hidden data; confirm in
// VertexManager.selectAll.
mntm_SelectAll[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                loadSelectorSettings(); // refresh 'sels' from the current menu state
                vm.selectAll(sels, true);
                vm.syncWithTextEditors();
                return;
            }
        }
    }
});
// Same as above, restricted (by the 'false' flag) to the visible variant.
mntm_SelectAllVisible[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                loadSelectorSettings();
                vm.selectAll(sels, false);
                vm.syncWithTextEditors();
                return;
            }
        }
    }
});
// Select all elements that share a colour with the current selection.
mntm_SelectAllWithColours[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                loadSelectorSettings();
                vm.selectAllWithSameColours(sels, true);
                vm.syncWithTextEditors();
                return;
            }
        }
    }
});
// Colour-based selection, visible variant ('false' flag).
mntm_SelectAllVisibleWithColours[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                loadSelectorSettings();
                vm.selectAllWithSameColours(sels, false);
                vm.syncWithTextEditors();
                return;
            }
        }
    }
});
// Clear the selection entirely (no selector settings needed).
mntm_SelectNone[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                vm.clearSelection();
                vm.syncWithTextEditors();
                return;
            }
        }
    }
});
// Invert the current selection with respect to the selector filter.
mntm_SelectInverse[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                loadSelectorSettings();
                vm.selectInverse(sels);
                vm.syncWithTextEditors();
                return;
            }
        }
    }
});
// "Same colour" selector toggle: after the click is processed, enable
// "Select everything" if any selector criterion is active, then re-open
// the selection menu.
mntm_WithSameColour[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                final boolean anyCriterionActive = mntm_WithHiddenData[0].getSelection()
                        || mntm_WithSameColour[0].getSelection()
                        || mntm_WithSameOrientation[0].getSelection()
                        || mntm_ExceptSubfiles[0].getSelection();
                mntm_SelectEverything[0].setEnabled(anyCriterionActive);
                showSelectMenu();
            }
        });
    }
});
// "Same orientation" selector toggle: updates the "Select everything" enable
// state, and — when the toggle was switched ON — asks for the angular surface
// normal threshold before re-opening the selection menu.
mntm_WithSameOrientation[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        // Deferred so the checkbox state is final when read below.
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                mntm_SelectEverything[0].setEnabled(
                        mntm_WithHiddenData[0].getSelection() ||
                        mntm_WithSameColour[0].getSelection() ||
                        mntm_WithSameOrientation[0].getSelection() ||
                        mntm_ExceptSubfiles[0].getSelection()
                        );
                if (mntm_WithSameOrientation[0].getSelection()) {
                    // Modal dialog writes the chosen threshold into 'sels'.
                    new ValueDialog(getShell(), "Set angular surface normal difference:", "Threshold in degree [°], range from -90 to 180.\nNegative values do not care about the surface winding,\nwhile positive do.") { //$NON-NLS-1$ //$NON-NLS-2$ I18N
                        @Override
                        public void initializeSpinner() {
                            this.spn_Value[0].setMinimum(new BigDecimal("-90")); //$NON-NLS-1$
                            this.spn_Value[0].setMaximum(new BigDecimal("180")); //$NON-NLS-1$
                            this.spn_Value[0].setValue(sels.getAngle());
                        }
                        @Override
                        public void applyValue() {
                            sels.setAngle(this.spn_Value[0].getValue());
                        }
                    }.open();
                }
                showSelectMenu();
            }
        });
    }
});
// "With accuracy" selector toggle: updates the "Select everything" enable
// state, and — when switched ON — asks for the matching distance threshold
// (in LDU) before re-opening the selection menu.
mntm_WithAccuracy[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        // Deferred so the checkbox state is final when read below.
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                mntm_SelectEverything[0].setEnabled(
                        mntm_WithHiddenData[0].getSelection() ||
                        mntm_WithSameColour[0].getSelection() ||
                        mntm_WithSameOrientation[0].getSelection() ||
                        mntm_ExceptSubfiles[0].getSelection()
                        );
                if (mntm_WithAccuracy[0].getSelection()) {
                    // Modal dialog writes the chosen distance into 'sels'.
                    new ValueDialog(getShell(), "Set accuracy:", "Threshold in LDU, range from 0 to 1000.\nControls the maximum distance between two points that the process will consider matching") { //$NON-NLS-1$ //$NON-NLS-2$ I18N
                        @Override
                        public void initializeSpinner() {
                            this.spn_Value[0].setMinimum(new BigDecimal("0")); //$NON-NLS-1$
                            this.spn_Value[0].setMaximum(new BigDecimal("1000")); //$NON-NLS-1$
                            this.spn_Value[0].setValue(sels.getEqualDistance());
                        }
                        @Override
                        public void applyValue() {
                            sels.setEqualDistance(this.spn_Value[0].getValue());
                        }
                    }.open();
                }
                showSelectMenu();
            }
        });
    }
});
// "With hidden data" selector toggle: recompute the "Select everything"
// enable state and re-open the selection menu.
mntm_WithHiddenData[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                final boolean anyCriterionActive = mntm_WithHiddenData[0].getSelection()
                        || mntm_WithSameColour[0].getSelection()
                        || mntm_WithSameOrientation[0].getSelection()
                        || mntm_ExceptSubfiles[0].getSelection();
                mntm_SelectEverything[0].setEnabled(anyCriterionActive);
                showSelectMenu();
            }
        });
    }
});
// "Whole subfiles" toggle: nothing to recompute, just re-open the selection
// menu after the click has been processed.
mntm_WithWholeSubfiles[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        final Runnable reopenMenu = new Runnable() {
            @Override
            public void run() {
                showSelectMenu();
            }
        };
        Display.getCurrent().asyncExec(reopenMenu);
    }
});
// "Except subfiles" toggle: recompute the "Select everything" enable state,
// disable the "whole subfiles" option while subfiles are excluded, then
// re-open the selection menu.
mntm_ExceptSubfiles[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                final boolean anyCriterionActive = mntm_WithHiddenData[0].getSelection()
                        || mntm_WithSameColour[0].getSelection()
                        || mntm_WithSameOrientation[0].getSelection()
                        || mntm_ExceptSubfiles[0].getSelection();
                mntm_SelectEverything[0].setEnabled(anyCriterionActive);
                mntm_WithWholeSubfiles[0].setEnabled(!mntm_ExceptSubfiles[0].getSelection());
                showSelectMenu();
            }
        });
    }
});
// The six filter toggles below need no extra logic — they only re-open the
// selection menu once SWT has finished processing the click (hence the
// deferred asyncExec). They share a single stateless listener instance.
final SelectionAdapter reopenSelectMenuListener = new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                showSelectMenu();
            }
        });
    }
};
mntm_StopAtEdges[0].addSelectionListener(reopenSelectMenuListener);
mntm_STriangles[0].addSelectionListener(reopenSelectMenuListener);
mntm_SQuads[0].addSelectionListener(reopenSelectMenuListener);
mntm_SCLines[0].addSelectionListener(reopenSelectMenuListener);
mntm_SVertices[0].addSelectionListener(reopenSelectMenuListener);
mntm_SLines[0].addSelectionListener(reopenSelectMenuListener);
// --- Selector scopes: run the configured selector with a specific scope. ---
// NOTE(review): unlike the select actions above, these do NOT return after the
// first matching view, so the selector runs once per 3D view showing the
// edited file — confirm this repetition is intended.

// Scope EVERYTHING: selector ignores the current selection as a seed.
mntm_SelectEverything[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                sels.setScope(SelectorSettings.EVERYTHING);
                loadSelectorSettings();
                vm.selector(sels);
                vm.syncWithTextEditors();
            }
        }
    }
});
// Scope CONNECTED: restrict the selector to geometry connected to the selection.
mntm_SelectConnected[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                sels.setScope(SelectorSettings.CONNECTED);
                loadSelectorSettings();
                vm.selector(sels);
                vm.syncWithTextEditors();
            }
        }
    }
});
// Scope TOUCHING: restrict the selector to geometry touching the selection.
mntm_SelectTouching[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                sels.setScope(SelectorSettings.TOUCHING);
                loadSelectorSettings();
                vm.selector(sels);
                vm.syncWithTextEditors();
            }
        }
    }
});
// Selects all vertices that are not referenced by any other geometry,
// deferred to the next UI cycle.
mntm_SelectIsolatedVertices[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (!c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        continue;
                    }
                    VertexManager vertexManager = c3d.getLockableDatFileReference().getVertexManager();
                    vertexManager.selectIsolatedVertices();
                    vertexManager.syncWithTextEditors();
                }
            }
        });
    }
});
// Splits the selected edges once (into 2 fractions), deferred to the next
// UI cycle.
mntm_Split[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (!c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        continue;
                    }
                    c3d.getLockableDatFileReference().getVertexManager().split(2);
                }
            }
        });
    }
});
// Splits the selected edges into a user-chosen number of fractions.
// The spinner value is transported out of the dialog callback through a
// single-element array (effectively-final workaround).
mntm_SplitNTimes[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        final int[] frac = new int[]{2}; // default: split in 2
                        if (new ValueDialogInt(getShell(), "Split edges:", "(Number of resulting fractions)") { //$NON-NLS-1$ //$NON-NLS-2$ I18N
                            @Override
                            public void initializeSpinner() {
                                this.spn_Value[0].setMinimum(2);
                                this.spn_Value[0].setMaximum(1000);
                                this.spn_Value[0].setValue(2);
                            }
                            @Override
                            public void applyValue() {
                                frac[0] = this.spn_Value[0].getValue();
                            }
                        }.open() == OK) {
                            VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                            vm.split(frac[0]);
                        }
                    }
                }
            }
        });
    }
});
// --- Merge actions: each delegates to VertexManager.merge with a MergeTo
// target and stops after the first 3D view that shows the edited file. ---

// Merge the selection to its average position.
mntm_MergeToAverage[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                vm.merge(MergeTo.AVERAGE, true);
                return;
            }
        }
    }
});
// Merge the selection to the last selected element.
mntm_MergeToLastSelected[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                vm.merge(MergeTo.LAST_SELECTED, true);
                return;
            }
        }
    }
});
// Merge the selection to the nearest vertex.
mntm_MergeToNearestVertex[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                vm.merge(MergeTo.NEAREST_VERTEX, true);
                return;
            }
        }
    }
});
// Merge the selection to the nearest edge.
mntm_MergeToNearestEdge[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                vm.merge(MergeTo.NEAREST_EDGE, true);
                return;
            }
        }
    }
});
// Merge the selection to the nearest face.
mntm_MergeToNearestFace[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                vm.merge(MergeTo.NEAREST_FACE, true);
                return;
            }
        }
    }
});
// Opens the coordinates dialog and applies the entered XYZ values to the
// selection (TransformationMode.SET). If exactly one vertex is selected it
// pre-fills the dialog.
mntm_setXYZ[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                Vertex v = null;
                final VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                final Set<Vertex> sv = vm.getSelectedVertices();
                if (sv.size() == 1) {
                    v = sv.iterator().next(); // pre-fill with the single selected vertex
                }
                if (new CoordinatesDialog(getShell(), v).open() == IDialogConstants.OK_ID) {
                    // Dialog results are exposed via static accessors.
                    vm.setXyzOrTranslateOrTransform(CoordinatesDialog.getVertex(), null, TransformationMode.SET, CoordinatesDialog.isX(), CoordinatesDialog.isY(), CoordinatesDialog.isZ(), true);
                }
                return;
            }
        }
    }
});
// Opens the translate dialog and moves the selection by the entered offset.
mntm_Translate[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (!c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                continue;
            }
            if (new TranslateDialog(getShell(), null).open() == IDialogConstants.OK_ID) {
                // Dialog results are exposed via static accessors.
                c3d.getLockableDatFileReference().getVertexManager().setXyzOrTranslateOrTransform(TranslateDialog.getOffset(), null, TransformationMode.TRANSLATE, TranslateDialog.isX(), TranslateDialog.isY(), TranslateDialog.isZ(), true);
            }
            return;
        }
    }
});
// Opens the rotate dialog and rotates the selection. If the clipboard holds
// exactly one "0 !LPE VERTEX x y z" meta line, that vertex is parsed and
// offered to the dialog (as a pivot candidate).
// FIX: the transform call passed TranslateDialog.isZ() — a copy-paste left
// over from the translate handler — so the rotate's own Z flag was ignored.
// It now queries RotateDialog.isZ().
mntm_Rotate[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                TreeSet<Vertex> clipboard = new TreeSet<Vertex>();
                if (VertexManager.getClipboard().size() == 1) {
                    GData vertex = VertexManager.getClipboard().get(0);
                    if (vertex.type() == 0) {
                        // Normalise whitespace, then parse "0 !LPE VERTEX x y z".
                        String line = vertex.toString();
                        line = line.replaceAll("\\s+", " ").trim(); //$NON-NLS-1$ //$NON-NLS-2$
                        String[] data_segments = line.split("\\s+"); //$NON-NLS-1$
                        if (line.startsWith("0 !LPE")) { //$NON-NLS-1$
                            if (line.startsWith("VERTEX ", 7)) { //$NON-NLS-1$
                                Vector3d start = new Vector3d();
                                boolean numberError = false;
                                if (data_segments.length == 6) {
                                    try {
                                        start.setX(new BigDecimal(data_segments[3], Threshold.mc));
                                    } catch (NumberFormatException nfe) {
                                        numberError = true;
                                    }
                                    try {
                                        start.setY(new BigDecimal(data_segments[4], Threshold.mc));
                                    } catch (NumberFormatException nfe) {
                                        numberError = true;
                                    }
                                    try {
                                        start.setZ(new BigDecimal(data_segments[5], Threshold.mc));
                                    } catch (NumberFormatException nfe) {
                                        numberError = true;
                                    }
                                } else {
                                    numberError = true;
                                }
                                if (!numberError) {
                                    clipboard.add(new Vertex(start));
                                }
                            }
                        }
                    }
                }
                if (new RotateDialog(getShell(), null, clipboard).open() == IDialogConstants.OK_ID) {
                    c3d.getLockableDatFileReference().getVertexManager().setXyzOrTranslateOrTransform(RotateDialog.getAngles(), RotateDialog.getPivot(), TransformationMode.ROTATE, RotateDialog.isX(), RotateDialog.isY(), RotateDialog.isZ(), true);
                }
                return;
            }
        }
    }
});
// Opens the scale dialog and scales the selection. If the clipboard holds
// exactly one "0 !LPE VERTEX x y z" meta line, that vertex is parsed and
// offered to the dialog (as a pivot candidate).
// NOTE(review): the clipboard-parsing part is a verbatim duplicate of the
// rotate handler above — a shared helper would remove the duplication.
mntm_Scale[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                TreeSet<Vertex> clipboard = new TreeSet<Vertex>();
                if (VertexManager.getClipboard().size() == 1) {
                    GData vertex = VertexManager.getClipboard().get(0);
                    if (vertex.type() == 0) {
                        // Normalise whitespace, then parse "0 !LPE VERTEX x y z".
                        String line = vertex.toString();
                        line = line.replaceAll("\\s+", " ").trim(); //$NON-NLS-1$ //$NON-NLS-2$
                        String[] data_segments = line.split("\\s+"); //$NON-NLS-1$
                        if (line.startsWith("0 !LPE")) { //$NON-NLS-1$
                            if (line.startsWith("VERTEX ", 7)) { //$NON-NLS-1$
                                Vector3d start = new Vector3d();
                                boolean numberError = false;
                                if (data_segments.length == 6) {
                                    try {
                                        start.setX(new BigDecimal(data_segments[3], Threshold.mc));
                                    } catch (NumberFormatException nfe) {
                                        numberError = true;
                                    }
                                    try {
                                        start.setY(new BigDecimal(data_segments[4], Threshold.mc));
                                    } catch (NumberFormatException nfe) {
                                        numberError = true;
                                    }
                                    try {
                                        start.setZ(new BigDecimal(data_segments[5], Threshold.mc));
                                    } catch (NumberFormatException nfe) {
                                        numberError = true;
                                    }
                                } else {
                                    numberError = true;
                                }
                                if (!numberError) {
                                    clipboard.add(new Vertex(start));
                                }
                            }
                        }
                    }
                }
                if (new ScaleDialog(getShell(), null, clipboard).open() == IDialogConstants.OK_ID) {
                    // Dialog results are exposed via static accessors.
                    c3d.getLockableDatFileReference().getVertexManager().setXyzOrTranslateOrTransform(ScaleDialog.getScaleFactors(), ScaleDialog.getPivot(), TransformationMode.SCALE, ScaleDialog.isX(), ScaleDialog.isY(), ScaleDialog.isZ(), true);
                }
                return;
            }
        }
    }
});
// --- Tool menu items: each opens its configuration dialog and, on OK, runs
// the corresponding VertexManager operation on the edited file. All stop
// after the first matching 3D view. (The single-statement ifs are unbraced
// in the original style.) ---

// Edger2: add missing edge lines based on settings 'es'.
mntm_Edger2[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new EdgerDialog(getShell(), es).open() == IDialogConstants.OK_ID)
                    vm.addEdges(es);
                return;
            }
        }
    }
});
// Rectifier: rectify geometry based on settings 'rs'.
mntm_Rectifier[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new RectifierDialog(getShell(), rs).open() == IDialogConstants.OK_ID)
                    vm.rectify(rs, true);
                return;
            }
        }
    }
});
// Isecalc: intersection calculation based on settings 'is'.
mntm_Isecalc[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new IsecalcDialog(getShell(), is).open() == IDialogConstants.OK_ID)
                    vm.isecalc(is);
                return;
            }
        }
    }
});
// SlicerPro: slicing based on settings 'ss'.
mntm_SlicerPro[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new SlicerProDialog(getShell(), ss).open() == IDialogConstants.OK_ID)
                    vm.slicerpro(ss);
                return;
            }
        }
    }
});
// Intersector: based on settings 'ins'.
mntm_Intersector[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new IntersectorDialog(getShell(), ins).open() == IDialogConstants.OK_ID)
                    vm.intersector(ins, true);
                return;
            }
        }
    }
});
// Lines2Pattern: no settings object, confirmation dialog only.
mntm_Lines2Pattern[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new Lines2PatternDialog(getShell()).open() == IDialogConstants.OK_ID)
                    vm.lines2pattern();
                return;
            }
        }
    }
});
// PathTruder: based on settings 'ps'.
mntm_PathTruder[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new PathTruderDialog(getShell(), ps).open() == IDialogConstants.OK_ID)
                    vm.pathTruder(ps);
                return;
            }
        }
    }
});
// SymSplitter: based on settings 'sims'.
mntm_SymSplitter[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new SymSplitterDialog(getShell(), sims).open() == IDialogConstants.OK_ID)
                    vm.symSplitter(sims);
                return;
            }
        }
    }
});
// Unificator: based on settings 'us'.
mntm_Unificator[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                if (new UnificatorDialog(getShell(), us).open() == IDialogConstants.OK_ID)
                    vm.unificator(us);
                return;
            }
        }
    }
});
mntm_Txt2Dat[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
DatFile df = c3d.getLockableDatFileReference();
if (df.isReadOnly()) return;
VertexManager vm = df.getVertexManager();
if (new Txt2DatDialog(getShell(), ts).open() == IDialogConstants.OK_ID && !ts.getText().trim().isEmpty()) {
java.awt.Font myFont;
if (ts.getFontData() == null) {
myFont = new java.awt.Font(org.nschmidt.ldparteditor.enums.Font.MONOSPACE.getFontData()[0].getName(), java.awt.Font.PLAIN, 32);
} else {
FontData fd = ts.getFontData();
int style = 0;
final int c2 = SWT.BOLD | SWT.ITALIC;
switch (fd.getStyle()) {
case c2:
style = java.awt.Font.BOLD | java.awt.Font.ITALIC;
break;
case SWT.BOLD:
style = java.awt.Font.BOLD;
break;
case SWT.ITALIC:
style = java.awt.Font.ITALIC;
break;
case SWT.NORMAL:
style = java.awt.Font.PLAIN;
break;
}
myFont = new java.awt.Font(fd.getName(), style, fd.getHeight());
}
GData anchorData = df.getDrawChainTail();
int lineNumber = df.getDrawPerLine_NOCLONE().getKey(anchorData);
Set<GData> triangleSet = TextTriangulator.triangulateText(myFont, ts.getText().trim(), ts.getFlatness().doubleValue(), ts.getInterpolateFlatness().doubleValue(), View.DUMMY_REFERENCE, df, ts.getFontHeight().intValue(), ts.getDeltaAngle().doubleValue());
for (GData gda3 : triangleSet) {
lineNumber++;
df.getDrawPerLine_NOCLONE().put(lineNumber, gda3);
GData gdata = gda3;
anchorData.setNext(gda3);
anchorData = gdata;
}
anchorData.setNext(null);
df.setDrawChainTail(anchorData);
vm.setModified(true);
return;
}
}
}
}
});
// MARK Options
mntm_ResetSettingsOnRestart[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.OK | SWT.CANCEL);
messageBox.setText("Warning:"); //$NON-NLS-1$ I18N
messageBox.setMessage("Are you sure to delete your configuration on the next start?"); //$NON-NLS-1$
int result = messageBox.open();
if (result == SWT.CANCEL) {
return;
}
WorkbenchManager.getUserSettingState().setResetOnStart(true);
}
});
mntm_SelectAnotherLDConfig[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
FileDialog fd = new FileDialog(sh, SWT.OPEN);
fd.setText("Open LDraw Configuration File (LDConfig.ldr):"); //$NON-NLS-1$ I18N Needs translation!
fd.setFilterPath(WorkbenchManager.getUserSettingState().getLdrawFolderPath());
String[] filterExt = { "*.ldr", "LDConfig.ldr", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
fd.setFilterExtensions(filterExt);
String[] filterNames = { "LDraw Configuration File (*.ldr)", "LDraw Configuration File (LDConfig.ldr)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ I18N Needs translation!
fd.setFilterNames(filterNames);
String selected = fd.open();
System.out.println(selected);
if (selected != null && View.loadLDConfig(selected)) {
GData.CACHE_warningsAndErrors.clear();
WorkbenchManager.getUserSettingState().setLdConfigPath(selected);
Set<DatFile> dfs = new HashSet<DatFile>();
for (OpenGLRenderer renderer : renders) {
dfs.add(renderer.getC3D().getLockableDatFileReference());
}
for (DatFile df : dfs) {
SubfileCompiler.compile(df);
}
}
}
});
mntm_SyncWithTextEditor[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
WorkbenchManager.getUserSettingState().getSyncWithTextEditor().set(mntm_SyncWithTextEditor[0].getSelection());
}
});
mntm_SyncLpeInline[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
WorkbenchManager.getUserSettingState().getSyncWithLpeInline().set(mntm_SyncLpeInline[0].getSelection());
}
});
// MARK Merge, split...
mntm_Flip[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
vm.flipSelection();
return;
}
}
}
});
mntm_SubdivideCatmullClark[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
vm.subdivideCatmullClark();
return;
}
}
}
});
mntm_SubdivideLoop[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
vm.subdivideLoop();
return;
}
}
}
});
// MARK Background PNG
btn_PngFocus[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
Composite3D c3d = null;
for (OpenGLRenderer renderer : renders) {
c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
c3d = c3d.getLockableDatFileReference().getLastSelectedComposite();
if (c3d == null) {
c3d = renderer.getC3D();
}
break;
}
}
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Matrix4f tMatrix = new Matrix4f();
tMatrix.setIdentity();
tMatrix = tMatrix.scale(new Vector3f(png.scale.x, png.scale.y, png.scale.z));
Matrix4f dMatrix = new Matrix4f();
dMatrix.setIdentity();
Matrix4f.rotate((float) (png.angleB.doubleValue() / 180.0 * Math.PI), new Vector3f(1f, 0f, 0f), dMatrix, dMatrix);
Matrix4f.rotate((float) (png.angleA.doubleValue() / 180.0 * Math.PI), new Vector3f(0f, 1f, 0f), dMatrix, dMatrix);
Matrix4f.mul(dMatrix, tMatrix, tMatrix);
Vector4f vx = Matrix4f.transform(dMatrix, new Vector4f(png.offset.x, 0f, 0f, 1f), null);
Vector4f vy = Matrix4f.transform(dMatrix, new Vector4f(0f, png.offset.y, 0f, 1f), null);
Vector4f vz = Matrix4f.transform(dMatrix, new Vector4f(0f, 0f, png.offset.z, 1f), null);
Matrix4f transMatrix = new Matrix4f();
transMatrix.setIdentity();
transMatrix.m30 = -vx.x;
transMatrix.m31 = -vx.y;
transMatrix.m32 = -vx.z;
transMatrix.m30 -= vy.x;
transMatrix.m31 -= vy.y;
transMatrix.m32 -= vy.z;
transMatrix.m30 -= vz.x;
transMatrix.m31 -= vz.y;
transMatrix.m32 -= vz.z;
Matrix4f rotMatrixD = new Matrix4f();
rotMatrixD.setIdentity();
Matrix4f.rotate((float) (png.angleB.doubleValue() / 180.0 * Math.PI), new Vector3f(1f, 0f, 0f), rotMatrixD, rotMatrixD);
Matrix4f.rotate((float) (png.angleA.doubleValue() / 180.0 * Math.PI), new Vector3f(0f, 1f, 0f), rotMatrixD, rotMatrixD);
rotMatrixD = rotMatrixD.scale(new Vector3f(-1f, 1f, -1f));
rotMatrixD.invert();
c3d.getRotation().load(rotMatrixD);
c3d.getTranslation().load(transMatrix);
c3d.getPerspectiveCalculator().calculateOriginData();
vm.setSelectedBgPicture(png);
return;
}
});
btn_PngImage[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
FileDialog fd = new FileDialog(getShell(), SWT.SAVE);
fd.setText("Open PNG Image"); //$NON-NLS-1$ I18N Needs translation!
try {
File f = new File(png.texturePath);
fd.setFilterPath(f.getParent());
fd.setFileName(f.getName());
} catch (Exception ex) {
}
String[] filterExt = { "*.png", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$
fd.setFilterExtensions(filterExt);
String[] filterNames = { "Portable Network Graphics (*.png)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ I18N Needs translation!
fd.setFilterNames(filterNames);
String texturePath = fd.open();
if (texturePath != null) {
String newText = png.getString(png.offset, png.angleA, png.angleB, png.angleC, png.scale, texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, png.angleC, png.scale, texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
}
return;
}
}
}
});
btn_PngNext[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
DatFile df = c3d.getLockableDatFileReference();
if (df.equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = df.getVertexManager();
GDataPNG sp = vm.getSelectedBgPicture();
boolean noBgPictures = df.hasNoBackgroundPictures();
vm.setSelectedBgPictureIndex(vm.getSelectedBgPictureIndex() + 1);
boolean indexOutOfBounds = vm.getSelectedBgPictureIndex() >= df.getBackgroundPictureCount();
boolean noRealData = df.getDrawPerLine_NOCLONE().getKey(sp) == null;
if (noBgPictures) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
if (indexOutOfBounds) vm.setSelectedBgPictureIndex(0);
if (noRealData) {
vm.setSelectedBgPictureIndex(0);
vm.setSelectedBgPicture(df.getBackgroundPicture(0));
} else {
vm.setSelectedBgPicture(df.getBackgroundPicture(vm.getSelectedBgPictureIndex()));
}
}
updateBgPictureTab();
}
}
}
});
btn_PngPrevious[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
DatFile df = c3d.getLockableDatFileReference();
if (df.equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = df.getVertexManager();
GDataPNG sp = vm.getSelectedBgPicture();
boolean noBgPictures = df.hasNoBackgroundPictures();
vm.setSelectedBgPictureIndex(vm.getSelectedBgPictureIndex() - 1);
boolean indexOutOfBounds = vm.getSelectedBgPictureIndex() < 0;
boolean noRealData = df.getDrawPerLine_NOCLONE().getKey(sp) == null;
if (noBgPictures) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
if (indexOutOfBounds) vm.setSelectedBgPictureIndex(df.getBackgroundPictureCount() - 1);
if (noRealData) {
vm.setSelectedBgPictureIndex(0);
vm.setSelectedBgPicture(df.getBackgroundPicture(0));
} else {
vm.setSelectedBgPicture(df.getBackgroundPicture(vm.getSelectedBgPictureIndex()));
}
}
updateBgPictureTab();
}
}
}
});
spn_PngA1[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
String newText = png.getString(png.offset, spn.getValue(), png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, spn.getValue(), png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngA2[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
String newText = png.getString(png.offset, png.angleA, spn.getValue(), png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, spn.getValue(), png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngA3[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
String newText = png.getString(png.offset, png.angleA, png.angleB, spn.getValue(), png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, spn.getValue(), png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngSX[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newScale = new Vertex(spn.getValue(), png.scale.Y, png.scale.Z);
String newText = png.getString(png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngSY[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newScale = new Vertex(png.scale.X, spn.getValue(), png.scale.Z);
String newText = png.getString(png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
vm.setModified(true);
return;
}
}
}
});
spn_PngX[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newOffset = new Vertex(spn.getValue(), png.offset.Y, png.offset.Z);
String newText = png.getString(newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngY[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newOffset = new Vertex(png.offset.X, spn.getValue(), png.offset.Z);
String newText = png.getString(newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngZ[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newOffset = new Vertex(png.offset.X, png.offset.Y, spn.getValue());
String newText = png.getString(newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
Project.createDefault();
treeItem_Project[0].setData(Project.getProjectPath());
treeItem_Official[0].setData(WorkbenchManager.getUserSettingState().getLdrawFolderPath());
treeItem_Unofficial[0].setData(WorkbenchManager.getUserSettingState().getUnofficialFolderPath());
LibraryManager.readUnofficialParts(treeItem_UnofficialParts[0]);
LibraryManager.readUnofficialSubparts(treeItem_UnofficialSubparts[0]);
LibraryManager.readUnofficialPrimitives(treeItem_UnofficialPrimitives[0]);
LibraryManager.readUnofficialHiResPrimitives(treeItem_UnofficialPrimitives48[0]);
LibraryManager.readOfficialParts(treeItem_OfficialParts[0]);
LibraryManager.readOfficialSubparts(treeItem_OfficialSubparts[0]);
LibraryManager.readOfficialPrimitives(treeItem_OfficialPrimitives[0]);
LibraryManager.readOfficialHiResPrimitives(treeItem_OfficialPrimitives48[0]);
txt_Search[0].setText(" "); //$NON-NLS-1$
txt_Search[0].setText(""); //$NON-NLS-1$
Project.getFileToEdit().setLastSelectedComposite(Editor3DWindow.renders.get(0).getC3D());
new EditorTextWindow().run(Project.getFileToEdit());
updateBgPictureTab();
this.open();
// Dispose all resources (never delete this!)
ResourceManager.dispose();
SWTResourceManager.dispose();
// Dispose the display (never delete this, too!)
Display.getCurrent().dispose();
}
private void replaceBgPicture(GDataPNG selectedBgPicture, GDataPNG newBgPicture, DatFile linkedDatFile) {
if (linkedDatFile.getDrawPerLine_NOCLONE().getKey(selectedBgPicture) == null) return;
GData before = selectedBgPicture.getBefore();
GData next = selectedBgPicture.getNext();
int index = linkedDatFile.getDrawPerLine_NOCLONE().getKey(selectedBgPicture);
selectedBgPicture.setGoingToBeReplaced(true);
linkedDatFile.getVertexManager().remove(selectedBgPicture);
linkedDatFile.getDrawPerLine_NOCLONE().put(index, newBgPicture);
before.setNext(newBgPicture);
newBgPicture.setNext(next);
linkedDatFile.getVertexManager().setSelectedBgPicture(newBgPicture);
updateBgPictureTab();
return;
}
private void resetAddState() {
setAddingSubfiles(false);
setAddingVertices(false);
setAddingLines(false);
setAddingTriangles(false);
setAddingQuads(false);
setAddingCondlines(false);
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
DatFile df = c3d.getLockableDatFileReference();
df.setObjVertex1(null);
df.setObjVertex2(null);
df.setObjVertex3(null);
df.setObjVertex4(null);
df.setNearestObjVertex1(null);
df.setNearestObjVertex2(null);
}
}
/**
* Create the actions.
*/
private void createActions() {
// Create the actions
// {
// menuItem_Open = new Action(I18n.EDITOR3D_Open) {
// @Override
// public void run() {
//
// }
// };
// menuItem_Open.setAccelerator(SWT.CTRL | 'Z');
// }
// {
// menuItem_Exit = new Action(I18n.EDITOR3D_Exit) {
// @Override
// public void run() {
//
// }
// };
// }
// {
// toolItem_Save = new Action(I18n.EDITOR3D_Save) {
// };
// toolItem_Save.setImageDescriptor(ImageDescriptor.createFromImage(ResourceManager.getImage("icon32_document-save.png"))); //$NON-NLS-1$
// toolItem_Save.setAccelerator(SWT.CTRL | 'S');
// }
// mnu_File[0].add(menuItem_Open);
// mnu_File[0].add(toolItem_Save);
// mnu_File[0].add(new Separator());
// mnu_File[0].add(menuItem_Exit);
// mnu_File[0].getParent().update(true);
}
/**
* The Shell-Close-Event
*/
@Override
protected void handleShellCloseEvent() {
boolean unsavedProjectFiles = false;
Set<DatFile> unsavedFiles = new HashSet<DatFile>(Project.getUnsavedFiles());
for (DatFile df : unsavedFiles) {
if (!df.getText().equals(df.getOriginalText()) || df.isVirtual() && !df.getText().trim().isEmpty()) {
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.CANCEL | SWT.NO);
messageBox.setText(I18n.DIALOG_UnsavedChangesTitle);
Object[] messageArguments = {df.getShortName()};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_UnsavedChanges);
messageBox.setMessage(formatter.format(messageArguments));
int result = messageBox.open();
if (result == SWT.NO) {
// Remove file from tree
updateTree_removeEntry(df);
} else if (result == SWT.YES) {
if (df.save()) {
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
messageBoxError.open();
cleanupClosedData();
updateTree_unsavedEntries();
return;
}
} else {
cleanupClosedData();
updateTree_unsavedEntries();
return;
}
}
}
Set<EditorTextWindow> ow = new HashSet<EditorTextWindow>(Project.getOpenTextWindows());
for (EditorTextWindow w : ow) {
w.getShell().close();
}
{
ArrayList<TreeItem> ta = getProjectParts().getItems();
for (TreeItem ti : ta) {
unsavedProjectFiles = unsavedProjectFiles || !((DatFile) ti.getData()).getText().trim().equals("") || !Project.getUnsavedFiles().contains(ti.getData()); //$NON-NLS-1$
}
}
{
ArrayList<TreeItem> ta = getProjectSubparts().getItems();
for (TreeItem ti : ta) {
unsavedProjectFiles = unsavedProjectFiles || !((DatFile) ti.getData()).getText().trim().equals("") || !Project.getUnsavedFiles().contains(ti.getData()); ; //$NON-NLS-1$
}
}
{
ArrayList<TreeItem> ta = getProjectPrimitives().getItems();
for (TreeItem ti : ta) {
unsavedProjectFiles = unsavedProjectFiles || !((DatFile) ti.getData()).getText().trim().equals("") || !Project.getUnsavedFiles().contains(ti.getData()); ; //$NON-NLS-1$
}
}
{
ArrayList<TreeItem> ta = getProjectPrimitives48().getItems();
for (TreeItem ti : ta) {
unsavedProjectFiles = unsavedProjectFiles || !((DatFile) ti.getData()).getText().trim().equals("") || !Project.getUnsavedFiles().contains(ti.getData()); ; //$NON-NLS-1$
}
}
if (unsavedProjectFiles && Project.isDefaultProject()) {
// Save new project here, if the project contains at least one non-empty file
boolean cancelIt = false;
boolean secondRun = false;
while (true) {
int result = IDialogConstants.CANCEL_ID;
if (secondRun) result = new NewProjectDialog(true).open();
if (result == IDialogConstants.OK_ID) {
while (new File(Project.getTempProjectPath()).isDirectory()) {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.YES | SWT.CANCEL | SWT.NO);
messageBoxError.setText(I18n.PROJECT_ProjectOverwriteTitle);
messageBoxError.setMessage(I18n.PROJECT_ProjectOverwrite);
int result2 = messageBoxError.open();
if (result2 == SWT.NO) {
result = new NewProjectDialog(true).open();
} else if (result2 == SWT.YES) {
break;
} else {
cancelIt = true;
break;
}
}
if (!cancelIt) {
Project.setProjectName(Project.getTempProjectName());
Project.setProjectPath(Project.getTempProjectPath());
NLogger.debug(getClass(), "Saving new project..."); //$NON-NLS-1$
if (!Project.save()) {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveProject);
}
}
break;
} else {
secondRun = true;
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.CANCEL | SWT.NO);
messageBox.setText(I18n.DIALOG_UnsavedChangesTitle);
Object[] messageArguments = {I18n.DIALOG_TheNewProject};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_UnsavedChanges);
messageBox.setMessage(formatter.format(messageArguments));
int result2 = messageBox.open();
if (result2 == SWT.CANCEL) {
cancelIt = true;
break;
} else if (result2 == SWT.NO) {
break;
}
}
}
if (cancelIt) {
cleanupClosedData();
updateTree_unsavedEntries();
return;
}
}
// NEVER DELETE THIS!
final int s = renders.size();
for (int i = 0; i < s; i++) {
GLCanvas canvas = canvasList.get(i);
OpenGLRenderer renderer = renders.get(i);
if (!canvas.isCurrent()) {
canvas.setCurrent();
try {
GLContext.useContext(canvas);
} catch (LWJGLException e) {
NLogger.error(OpenGLRenderer.class, e);
}
}
renderer.dispose();
}
// Save the workbench
WorkbenchManager.saveWorkbench();
setReturnCode(CANCEL);
close();
}
    /**
     * @return The serializable window state of the Editor3DWindow
     */
    public Editor3DWindowState getEditor3DWindowState() {
        return this.editor3DWindowState;
    }
    /**
     * @param editor3DWindowState
     *            The serializable window state of the Editor3DWindow
     */
    public void setEditor3DWindowState(Editor3DWindowState editor3DWindowState) {
        this.editor3DWindowState = editor3DWindowState;
    }
    /**
     * @return The current Editor3DWindow instance (application-wide singleton)
     */
    public static Editor3DWindow getWindow() {
        return Editor3DWindow.window;
    }
    /**
     * Updates the tree for new unsaved entries: refreshes the display text of
     * every file item (asterisk marker for unsaved files) and rebuilds the
     * "Unsaved" category from scratch.
     */
    public void updateTree_unsavedEntries() {
        // Categories 1-4 are the project folders, 5-8 the unofficial library folders.
        ArrayList<TreeItem> categories = new ArrayList<TreeItem>();
        categories.add(this.treeItem_ProjectParts[0]);
        categories.add(this.treeItem_ProjectSubparts[0]);
        categories.add(this.treeItem_ProjectPrimitives[0]);
        categories.add(this.treeItem_ProjectPrimitives48[0]);
        categories.add(this.treeItem_UnofficialParts[0]);
        categories.add(this.treeItem_UnofficialSubparts[0]);
        categories.add(this.treeItem_UnofficialPrimitives[0]);
        categories.add(this.treeItem_UnofficialPrimitives48[0]);
        int counter = 0;
        for (TreeItem item : categories) {
            counter++;
            ArrayList<TreeItem> datFileTreeItems = item.getItems();
            for (TreeItem df : datFileTreeItems) {
                DatFile d = (DatFile) df.getData();
                StringBuilder nameSb = new StringBuilder(new File(d.getNewName()).getName());
                final String d2 = d.getDescription();
                // Project categories only (counter < 5): prefix "(!) " when the file
                // does not reside inside the expected project folder structure.
                if (counter < 5 && (!d.getNewName().startsWith(Project.getProjectPath()) || !d.getNewName().replace(Project.getProjectPath() + File.separator, "").contains(File.separator))) { //$NON-NLS-1$
                    nameSb.insert(0, "(!) "); //$NON-NLS-1$
                }
                // MARK For Debug Only!
                //                DatType t = d.getType();
                //                if (t == DatType.PART) {
                //                    nameSb.append(" PART"); //$NON-NLS-1$
                //                } else if (t == DatType.SUBPART) {
                //                    nameSb.append(" SUBPART"); //$NON-NLS-1$
                //                } else if (t == DatType.PRIMITIVE) {
                //                    nameSb.append(" PRIMITIVE"); //$NON-NLS-1$
                //                } else if (t == DatType.PRIMITIVE48) {
                //                    nameSb.append(" PRIMITIVE48"); //$NON-NLS-1$
                //                }
                if (d2 != null)
                    nameSb.append(d2);
                // A leading asterisk marks files with unsaved modifications.
                if (Project.getUnsavedFiles().contains(d)) {
                    df.setText("* " + nameSb.toString()); //$NON-NLS-1$
                } else {
                    df.setText(nameSb.toString());
                }
            }
        }
        // Rebuild the "Unsaved" category from the current set of unsaved files.
        this.treeItem_Unsaved[0].removeAll();
        Set<DatFile> unsaved = Project.getUnsavedFiles();
        for (DatFile df : unsaved) {
            TreeItem ti = new TreeItem(this.treeItem_Unsaved[0], SWT.NONE);
            StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
            final String d = df.getDescription();
            if (d != null)
                nameSb.append(d);
            ti.setText(nameSb.toString());
            ti.setData(df);
        }
        this.treeParts[0].build();
        this.treeParts[0].redraw();
    }
    /**
     * Updates the tree for renamed entries: if a file's (new) path now starts
     * with the path prefix of a different category, its tree item, cached
     * reference and {@link DatType} are moved to that category.
     */
    @SuppressWarnings("unchecked")
    public void updateTree_renamedEntries() {
        // Maps a path prefix to the target tree category and DatType. Upper- and
        // lower-case folder spellings are registered as separate prefixes.
        HashMap<String, TreeItem> categories = new HashMap<String, TreeItem>();
        HashMap<String, DatType> types = new HashMap<String, DatType>();
        ArrayList<String> validPrefixes = new ArrayList<String>();
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS" + File.separator + "S" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_UnofficialSubparts[0]);
            types.put(s, DatType.SUBPART);
        }
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts" + File.separator + "s" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_UnofficialSubparts[0]);
            types.put(s, DatType.SUBPART);
        }
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_UnofficialParts[0]);
            types.put(s, DatType.PART);
        }
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s,this.treeItem_UnofficialParts[0]);
            types.put(s, DatType.PART);
        }
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P" + File.separator + "48" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_UnofficialPrimitives48[0]);
            types.put(s, DatType.PRIMITIVE48);
        }
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p" + File.separator + "48" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_UnofficialPrimitives48[0]);
            types.put(s, DatType.PRIMITIVE48);
        }
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_UnofficialPrimitives[0]);
            types.put(s, DatType.PRIMITIVE);
        }
        {
            String s = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_UnofficialPrimitives[0]);
            types.put(s, DatType.PRIMITIVE);
        }
        {
            String s = Project.getProjectPath() + File.separator + "PARTS" + File.separator + "S" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectSubparts[0]);
            types.put(s, DatType.SUBPART);
        }
        {
            String s = Project.getProjectPath() + File.separator + "parts" + File.separator + "s" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectSubparts[0]);
            types.put(s, DatType.SUBPART);
        }
        {
            String s = Project.getProjectPath() + File.separator + "PARTS" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectParts[0]);
            types.put(s, DatType.PART);
        }
        {
            String s = Project.getProjectPath() + File.separator + "parts" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectParts[0]);
            types.put(s, DatType.PART);
        }
        {
            String s = Project.getProjectPath() + File.separator + "P" + File.separator + "48" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectPrimitives48[0]);
            types.put(s, DatType.PRIMITIVE48);
        }
        {
            String s = Project.getProjectPath() + File.separator + "p" + File.separator + "48" + File.separator; //$NON-NLS-1$ //$NON-NLS-2$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectPrimitives48[0]);
            types.put(s, DatType.PRIMITIVE48);
        }
        {
            String s = Project.getProjectPath() + File.separator + "P" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectPrimitives[0]);
            types.put(s, DatType.PRIMITIVE);
        }
        {
            String s = Project.getProjectPath() + File.separator + "p" + File.separator; //$NON-NLS-1$
            validPrefixes.add(s);
            categories.put(s, this.treeItem_ProjectPrimitives[0]);
            types.put(s, DatType.PRIMITIVE);
        }
        // Sort prefixes by descending length (see Comp), so the most specific
        // prefix is matched before any of its parent folders.
        Collections.sort(validPrefixes, new Comp());
        for (String prefix : validPrefixes) {
            TreeItem item = categories.get(prefix);
            ArrayList<DatFile> dats = (ArrayList<DatFile>) item.getData();
            ArrayList<TreeItem> datFileTreeItems = item.getItems();
            Set<TreeItem> itemsToRemove = new HashSet<TreeItem>();
            for (TreeItem df : datFileTreeItems) {
                DatFile d = (DatFile) df.getData();
                String newName = d.getNewName();
                String validPrefix = null;
                // Find the most specific category prefix for the (renamed) path.
                for (String p2 : validPrefixes) {
                    if (newName.startsWith(p2)) {
                        validPrefix = p2;
                        break;
                    }
                }
                if (validPrefix != null) {
                    TreeItem item2 = categories.get(validPrefix);
                    if (!item2.equals(item)) {
                        // The file now belongs to another category: move its tree
                        // item and cached reference, and update its type.
                        itemsToRemove.add(df);
                        dats.remove(d);
                        ((ArrayList<DatFile>) item2.getData()).add(d);
                        TreeItem nt = new TreeItem(item2, SWT.NONE);
                        nt.setText(df.getText());
                        d.setType(types.get(validPrefix));
                        nt.setData(d);
                    }
                }
            }
            datFileTreeItems.removeAll(itemsToRemove);
        }
        this.treeParts[0].build();
        this.treeParts[0].redraw();
    }
private class Comp implements Comparator<String> {
@Override
public int compare(String o1, String o2) {
if (o1.length() < o2.length()) {
return 1;
} else if (o1.length() > o2.length()) {
return -1;
} else {
return 0;
}
}
}
    /**
     * Removes an item from the tree.<br><br>
     * If the file is open in a {@linkplain Composite3D}, that composite gets linked with a dummy file.
     * If it is open in a {@linkplain CompositeTab}, that tab will be closed.
     *
     * @param e the file to remove from the tree
     */
    public void updateTree_removeEntry(DatFile e) {
        ArrayList<TreeItem> categories = new ArrayList<TreeItem>();
        categories.add(this.treeItem_ProjectParts[0]);
        categories.add(this.treeItem_ProjectSubparts[0]);
        categories.add(this.treeItem_ProjectPrimitives[0]);
        categories.add(this.treeItem_ProjectPrimitives48[0]);
        categories.add(this.treeItem_UnofficialParts[0]);
        categories.add(this.treeItem_UnofficialSubparts[0]);
        categories.add(this.treeItem_UnofficialPrimitives[0]);
        categories.add(this.treeItem_UnofficialPrimitives48[0]);
        int counter = 0;
        for (TreeItem item : categories) {
            counter++;
            // Iterate over a copy, because matching items are removed from the
            // original list inside the loop.
            ArrayList<TreeItem> datFileTreeItems = new ArrayList<TreeItem>(item.getItems());
            for (TreeItem df : datFileTreeItems) {
                DatFile d = (DatFile) df.getData();
                if (e.equals(d)) {
                    item.getItems().remove(df);
                } else {
                    // Refresh the display text of all remaining items.
                    StringBuilder nameSb = new StringBuilder(new File(d.getNewName()).getName());
                    final String d2 = d.getDescription();
                    // Project categories only (counter < 5): prefix "(!) " when the
                    // file is outside the expected project folder structure.
                    if (counter < 5 && (!d.getNewName().startsWith(Project.getProjectPath()) || !d.getNewName().replace(Project.getProjectPath() + File.separator, "").contains(File.separator))) { //$NON-NLS-1$
                        nameSb.insert(0, "(!) "); //$NON-NLS-1$
                    }
                    if (d2 != null)
                        nameSb.append(d2);
                    // A leading asterisk marks files with unsaved modifications.
                    if (Project.getUnsavedFiles().contains(d)) {
                        df.setText("* " + nameSb.toString()); //$NON-NLS-1$
                    } else {
                        df.setText(nameSb.toString());
                    }
                }
            }
        }
        this.treeItem_Unsaved[0].removeAll();
        Project.removeUnsavedFile(e);
        // Unlink the file from every 3D view which still displays it.
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(e)) {
                c3d.unlinkData();
            }
        }
        // Close the text editor tab of the removed file; iterate over a snapshot
        // since closing tabs may modify the set of open windows.
        HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows());
        for (EditorTextWindow win : windows) {
            win.closeTabWithDatfile(e);
        }
        // Rebuild the "Unsaved" category.
        Set<DatFile> unsaved = Project.getUnsavedFiles();
        for (DatFile df : unsaved) {
            TreeItem ti = new TreeItem(this.treeItem_Unsaved[0], SWT.NONE);
            StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
            final String d = df.getDescription();
            if (d != null)
                nameSb.append(d);
            ti.setText(nameSb.toString());
            ti.setData(df);
        }
        // Remove the file from the cached references of all categories.
        TreeItem[] folders = new TreeItem[8];
        folders[0] = treeItem_ProjectParts[0];
        folders[1] = treeItem_ProjectPrimitives[0];
        folders[2] = treeItem_ProjectPrimitives48[0];
        folders[3] = treeItem_ProjectSubparts[0];
        folders[4] = treeItem_UnofficialParts[0];
        folders[5] = treeItem_UnofficialPrimitives[0];
        folders[6] = treeItem_UnofficialPrimitives48[0];
        folders[7] = treeItem_UnofficialSubparts[0];
        for (TreeItem folder : folders) {
            @SuppressWarnings("unchecked")
            ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
            cachedReferences.remove(e);
        }
        this.treeParts[0].build();
        this.treeParts[0].redraw();
    }
    // Helper functions
    /**
     * Selects the given button exclusively: all sibling buttons under the same
     * tool item are deselected first, then this button is switched on.
     */
    private void clickBtnTest(Button btn) {
        WidgetSelectionHelper.unselectAllChildButtons((ToolItem) btn.getParent());
        btn.setSelection(true);
    }
    /**
     * Re-applies the button's own toggle state after deselecting all of its
     * siblings, so at most this single button stays selected.
     */
    private void clickSingleBtn(Button btn) {
        boolean state = btn.getSelection();
        WidgetSelectionHelper.unselectAllChildButtons((ToolItem) btn.getParent());
        btn.setSelection(state);
    }
    /** @return whether any "add element" mode is currently active */
    public boolean isAddingSomething() {
        return addingSomething;
    }
    /**
     * Enables/disables the generic "adding" mode. Entering or leaving this
     * mode clears the selection of every open 3D view.
     */
    public void setAddingSomething(boolean addingSomething) {
        this.addingSomething = addingSomething;
        for (OpenGLRenderer renderer : renders) {
            renderer.getC3D().getLockableDatFileReference().getVertexManager().clearSelection();
        }
    }
    // Flags for the concrete element type which is currently being added:
    public boolean isAddingVertices() {
        return addingVertices;
    }
    public void setAddingVertices(boolean addingVertices) {
        this.addingVertices = addingVertices;
    }
    public boolean isAddingLines() {
        return addingLines;
    }
    public void setAddingLines(boolean addingLines) {
        this.addingLines = addingLines;
    }
    public boolean isAddingTriangles() {
        return addingTriangles;
    }
    public void setAddingTriangles(boolean addingTriangles) {
        this.addingTriangles = addingTriangles;
    }
    public boolean isAddingQuads() {
        return addingQuads;
    }
    public void setAddingQuads(boolean addingQuads) {
        this.addingQuads = addingQuads;
    }
    public boolean isAddingCondlines() {
        return addingCondlines;
    }
    public void setAddingCondlines(boolean addingCondlines) {
        this.addingCondlines = addingCondlines;
    }
    public boolean isAddingSubfiles() {
        return addingSubfiles;
    }
    public void setAddingSubfiles(boolean addingSubfiles) {
        this.addingSubfiles = addingSubfiles;
    }
public void disableAddAction() {
addingSomething = false;
addingVertices = false;
addingLines = false;
addingTriangles = false;
addingQuads = false;
addingCondlines = false;
addingSubfiles = false;
btn_AddVertex[0].setSelection(false);
btn_AddLine[0].setSelection(false);
btn_AddTriangle[0].setSelection(false);
btn_AddQuad[0].setSelection(false);
btn_AddCondline[0].setSelection(false);
btn_AddPrimitive[0].setSelection(false);
}
    // Accessors for the category nodes of the parts tree:
    public TreeItem getProjectParts() {
        return treeItem_ProjectParts[0];
    }
    public TreeItem getProjectPrimitives() {
        return treeItem_ProjectPrimitives[0];
    }
    public TreeItem getProjectPrimitives48() {
        return treeItem_ProjectPrimitives48[0];
    }
    public TreeItem getProjectSubparts() {
        return treeItem_ProjectSubparts[0];
    }
    public TreeItem getUnofficialParts() {
        return treeItem_UnofficialParts[0];
    }
    public TreeItem getUnofficialPrimitives() {
        return treeItem_UnofficialPrimitives[0];
    }
    public TreeItem getUnofficialPrimitives48() {
        return treeItem_UnofficialPrimitives48[0];
    }
    public TreeItem getUnofficialSubparts() {
        return treeItem_UnofficialSubparts[0];
    }
    public TreeItem getOfficialParts() {
        return treeItem_OfficialParts[0];
    }
    public TreeItem getOfficialPrimitives() {
        return treeItem_OfficialPrimitives[0];
    }
    public TreeItem getOfficialPrimitives48() {
        return treeItem_OfficialPrimitives48[0];
    }
    public TreeItem getOfficialSubparts() {
        return treeItem_OfficialSubparts[0];
    }
    public TreeItem getUnsaved() {
        return treeItem_Unsaved[0];
    }
    // Accessors for the current working mode and selection options:
    public int getWorkingType() {
        return workingType;
    }
    // NOTE(review): parameter is named "workingMode" while the field is
    // "workingType" — presumably the same concept; confirm and unify naming.
    public void setWorkingType(int workingMode) {
        this.workingType = workingMode;
    }
    public boolean isMovingAdjacentData() {
        return movingAdjacentData;
    }
    public void setMovingAdjacentData(boolean movingAdjacentData) {
        this.movingAdjacentData = movingAdjacentData;
    }
    public int getWorkingAction() {
        return workingAction;
    }
    public void setWorkingAction(int workingAction) {
        this.workingAction = workingAction;
    }
    public int getTransformationMode() {
        return transformationMode;
    }
    public boolean hasNoTransparentSelection() {
        return noTransparentSelection;
    }
    public void setNoTransparentSelection(boolean noTransparentSelection) {
        this.noTransparentSelection = noTransparentSelection;
    }
    public boolean hasBfcToggle() {
        return bfcToggle;
    }
    public void setBfcToggle(boolean bfcToggle) {
        this.bfcToggle = bfcToggle;
    }
    public GColour getLastUsedColour() {
        return lastUsedColour;
    }
    public void setLastUsedColour(GColour lastUsedColour) {
        this.lastUsedColour = lastUsedColour;
    }
public void cleanupClosedData() {
Set<DatFile> openFiles = new HashSet<DatFile>(Project.getUnsavedFiles());
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
openFiles.add(c3d.getLockableDatFileReference());
}
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
openFiles.add(((CompositeTab) t).getState().getFileNameObj());
}
}
Set<DatFile> deadFiles = new HashSet<DatFile>(Project.getParsedFiles());
deadFiles.removeAll(openFiles);
if (!deadFiles.isEmpty()) {
GData.CACHE_viewByProjection.clear();
GData.parsedLines.clear();
GData.CACHE_parsedFilesSource.clear();
}
for (DatFile datFile : deadFiles) {
datFile.disposeData();
}
if (!deadFiles.isEmpty()) {
// TODO Debug only System.gc();
}
}
    /** @return the current content of the search text field */
    public String getSearchCriteria() {
        return txt_Search[0].getText();
    }
    /** Clears the search filter so that all tree entries become visible again. */
    public void resetSearch() {
        search(""); //$NON-NLS-1$
    }
    /**
     * Filters the parts tree asynchronously on the UI thread: entries whose
     * display name does not match {@code .*word.*} get hidden.
     *
     * @param word the search fragment; invalid regex syntax falls back to "match all"
     */
    public void search(final String word) {
        this.getShell().getDisplay().asyncExec(new Runnable() {
            @SuppressWarnings("unchecked")
            @Override
            public void run() {
                String criteria = ".*" + word + ".*"; //$NON-NLS-1$ //$NON-NLS-2$
                TreeItem[] folders = new TreeItem[12];
                folders[0] = treeItem_OfficialParts[0];
                folders[1] = treeItem_OfficialPrimitives[0];
                folders[2] = treeItem_OfficialPrimitives48[0];
                folders[3] = treeItem_OfficialSubparts[0];
                folders[4] = treeItem_UnofficialParts[0];
                folders[5] = treeItem_UnofficialPrimitives[0];
                folders[6] = treeItem_UnofficialPrimitives48[0];
                folders[7] = treeItem_UnofficialSubparts[0];
                folders[8] = treeItem_ProjectParts[0];
                folders[9] = treeItem_ProjectPrimitives[0];
                folders[10] = treeItem_ProjectPrimitives48[0];
                folders[11] = treeItem_ProjectSubparts[0];
                // Lazily cache the full folder contents in the items' data, so
                // entries hidden now can be restored by a later search.
                if (folders[0].getData() == null) {
                    for (TreeItem folder : folders) {
                        folder.setData(new ArrayList<DatFile>());
                        for (TreeItem part : folder.getItems()) {
                            ((ArrayList<DatFile>) folder.getData()).add((DatFile) part.getData());
                        }
                    }
                }
                // Validate the regex with a dummy match; invalid user input
                // degrades gracefully to "match everything".
                try {
                    "42".matches(criteria); //$NON-NLS-1$
                } catch (Exception ex) {
                    criteria = ".*"; //$NON-NLS-1$
                }
                // Rebuild each folder from the cache; only matching entries stay shown.
                for (int i = 0; i < 12; i++) {
                    TreeItem folder = folders[i];
                    folder.removeAll();
                    for (DatFile part : (ArrayList<DatFile>) folder.getData()) {
                        StringBuilder nameSb = new StringBuilder(new File(part.getNewName()).getName());
                        // Indices 8-11 are project folders: prefix "(!) " when the
                        // file is outside the expected project folder structure.
                        if (i > 7 && (!part.getNewName().startsWith(Project.getProjectPath()) || !part.getNewName().replace(Project.getProjectPath() + File.separator, "").contains(File.separator))) { //$NON-NLS-1$
                            nameSb.insert(0, "(!) "); //$NON-NLS-1$
                        }
                        final String d = part.getDescription();
                        if (d != null)
                            nameSb.append(d);
                        String name = nameSb.toString();
                        TreeItem finding = new TreeItem(folder, SWT.NONE);
                        // Save the path
                        finding.setData(part);
                        // Set the filename
                        if (Project.getUnsavedFiles().contains(part) || !part.getOldName().equals(part.getNewName())) {
                            // Insert asterisk if the file was
                            // modified
                            finding.setText("* " + name); //$NON-NLS-1$
                        } else {
                            finding.setText(name);
                        }
                        finding.setShown(!(d != null && d.startsWith(" - ~Moved to")) && name.matches(criteria)); //$NON-NLS-1$
                    }
                }
                folders[0].getParent().build();
                folders[0].getParent().redraw();
                folders[0].getParent().update();
            }
        });
    }
public void closeAllComposite3D() {
ArrayList<OpenGLRenderer> renders2 = new ArrayList<OpenGLRenderer>(renders);
for (OpenGLRenderer renderer : renders2) {
Composite3D c3d = renderer.getC3D();
c3d.getModifier().closeView();
}
}
public TreeData getDatFileTreeData(DatFile df) {
TreeData result = new TreeData();
ArrayList<TreeItem> categories = new ArrayList<TreeItem>();
categories.add(this.treeItem_ProjectParts[0]);
categories.add(this.treeItem_ProjectSubparts[0]);
categories.add(this.treeItem_ProjectPrimitives[0]);
categories.add(this.treeItem_ProjectPrimitives48[0]);
categories.add(this.treeItem_UnofficialParts[0]);
categories.add(this.treeItem_UnofficialSubparts[0]);
categories.add(this.treeItem_UnofficialPrimitives[0]);
categories.add(this.treeItem_UnofficialPrimitives48[0]);
categories.add(this.treeItem_OfficialParts[0]);
categories.add(this.treeItem_OfficialSubparts[0]);
categories.add(this.treeItem_OfficialPrimitives[0]);
categories.add(this.treeItem_OfficialPrimitives48[0]);
categories.add(this.treeItem_Unsaved[0]);
for (TreeItem item : categories) {
ArrayList<TreeItem> datFileTreeItems = item.getItems();
for (TreeItem ti : datFileTreeItems) {
DatFile d = (DatFile) ti.getData();
if (df.equals(d)) {
result.setLocation(ti);
} else if (d.getShortName().equals(df.getShortName())) {
result.getLocationsWithSameShortFilenames().add(ti);
}
}
}
return result;
}
    /**
     * Updates the background picture tab with the currently selected
     * background picture of the file in edit, or resets/disables the widgets
     * when no picture is selected.
     */
    public void updateBgPictureTab() {
        for (OpenGLRenderer renderer : renders) {
            Composite3D c3d = renderer.getC3D();
            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                GDataPNG png = vm.getSelectedBgPicture();
                if (png == null) {
                    // No picture selected: reset all values and disable the widgets.
                    // The guard flag suppresses widget listeners while values are
                    // changed programmatically.
                    updatingPngPictureTab = true;
                    txt_PngPath[0].setText("---"); //$NON-NLS-1$
                    txt_PngPath[0].setToolTipText("---"); //$NON-NLS-1$
                    spn_PngX[0].setValue(BigDecimal.ZERO);
                    spn_PngY[0].setValue(BigDecimal.ZERO);
                    spn_PngZ[0].setValue(BigDecimal.ZERO);
                    spn_PngA1[0].setValue(BigDecimal.ZERO);
                    spn_PngA2[0].setValue(BigDecimal.ZERO);
                    spn_PngA3[0].setValue(BigDecimal.ZERO);
                    spn_PngSX[0].setValue(BigDecimal.ONE);
                    spn_PngSY[0].setValue(BigDecimal.ONE);
                    txt_PngPath[0].setEnabled(false);
                    btn_PngFocus[0].setEnabled(false);
                    btn_PngImage[0].setEnabled(false);
                    spn_PngX[0].setEnabled(false);
                    spn_PngY[0].setEnabled(false);
                    spn_PngZ[0].setEnabled(false);
                    spn_PngA1[0].setEnabled(false);
                    spn_PngA2[0].setEnabled(false);
                    spn_PngA3[0].setEnabled(false);
                    spn_PngSX[0].setEnabled(false);
                    spn_PngSY[0].setEnabled(false);
                    spn_PngA1[0].getParent().update();
                    updatingPngPictureTab = false;
                    return;
                }
                // A picture is selected: enable the widgets and show its
                // path, position, rotation and scale.
                updatingPngPictureTab = true;
                txt_PngPath[0].setEnabled(true);
                btn_PngFocus[0].setEnabled(true);
                btn_PngImage[0].setEnabled(true);
                spn_PngX[0].setEnabled(true);
                spn_PngY[0].setEnabled(true);
                spn_PngZ[0].setEnabled(true);
                spn_PngA1[0].setEnabled(true);
                spn_PngA2[0].setEnabled(true);
                spn_PngA3[0].setEnabled(true);
                spn_PngSX[0].setEnabled(true);
                spn_PngSY[0].setEnabled(true);
                txt_PngPath[0].setText(png.texturePath);
                txt_PngPath[0].setToolTipText(png.texturePath);
                spn_PngX[0].setValue(png.offset.X);
                spn_PngY[0].setValue(png.offset.Y);
                spn_PngZ[0].setValue(png.offset.Z);
                spn_PngA1[0].setValue(png.angleA);
                spn_PngA2[0].setValue(png.angleB);
                spn_PngA3[0].setValue(png.angleC);
                spn_PngSX[0].setValue(png.scale.X);
                spn_PngSY[0].setValue(png.scale.Y);
                spn_PngA1[0].getParent().update();
                updatingPngPictureTab = false;
                return;
            }
        }
    }
    /**
     * Deactivates the "add subfile / primitive" mode: resets the add state,
     * releases the primitive toolbar toggle and clears the adding flags.
     */
    public void unselectAddSubfile() {
        resetAddState();
        btn_AddPrimitive[0].setSelection(false);
        setAddingSubfiles(false);
        setAddingSomething(false); // also clears the selection in all 3D views
    }
public DatFile createNewDatFile(Shell sh, OpenInWhat where) {
FileDialog fd = new FileDialog(sh, SWT.SAVE);
fd.setText("Create a new *.dat file"); //$NON-NLS-1$ I18N Needs translation!
if ("project".equals(Project.getProjectPath())) { //$NON-NLS-1$
try {
String path = LDPartEditor.class.getProtectionDomain().getCodeSource().getLocation().getPath();
String decodedPath = URLDecoder.decode(path, "UTF-8"); //$NON-NLS-1$
decodedPath = decodedPath.substring(0, decodedPath.length() - 4);
fd.setFilterPath(decodedPath + "project"); //$NON-NLS-1$
} catch (Exception consumed) {
fd.setFilterPath(Project.getProjectPath());
}
} else {
fd.setFilterPath(Project.getProjectPath());
}
String[] filterExt = { "*.dat", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$
fd.setFilterExtensions(filterExt);
String[] filterNames = { "LDraw Source File (*.dat)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ I18N Needs translation!
fd.setFilterNames(filterNames);
while (true) {
String selected = fd.open();
System.out.println(selected);
if (selected != null) {
// Check if its already created
DatFile df = new DatFile(selected);
if (isFileNameAllocated(selected, df, true)) {
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.RETRY | SWT.CANCEL);
messageBox.setText(I18n.DIALOG_AlreadyAllocatedNameTitle);
messageBox.setMessage(I18n.DIALOG_AlreadyAllocatedName);
int result = messageBox.open();
if (result == SWT.CANCEL) {
break;
}
} else {
TreeItem ti = new TreeItem(this.treeItem_ProjectParts[0], SWT.NONE);
StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
nameSb.append("(new file)"); //$NON-NLS-1$ I18N
ti.setText(nameSb.toString());
ti.setData(df);
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
cachedReferences.add(df);
Project.addUnsavedFile(df);
updateTree_renamedEntries();
updateTree_unsavedEntries();
openDatFile(df, where, null);
return df;
}
} else {
break;
}
}
return null;
}
public DatFile openDatFile(Shell sh, OpenInWhat where) {
FileDialog fd = new FileDialog(sh, SWT.OPEN);
fd.setText("Open *.dat file"); //$NON-NLS-1$ I18N Needs translation!
if ("project".equals(Project.getProjectPath())) { //$NON-NLS-1$
try {
String path = LDPartEditor.class.getProtectionDomain().getCodeSource().getLocation().getPath();
String decodedPath = URLDecoder.decode(path, "UTF-8"); //$NON-NLS-1$
decodedPath = decodedPath.substring(0, decodedPath.length() - 4);
fd.setFilterPath(decodedPath + "project"); //$NON-NLS-1$
} catch (Exception consumed) {
fd.setFilterPath(Project.getProjectPath());
}
} else {
fd.setFilterPath(Project.getProjectPath());
}
String[] filterExt = { "*.dat", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$
fd.setFilterExtensions(filterExt);
String[] filterNames = { "LDraw Source File (*.dat)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ I18N Needs translation!
fd.setFilterNames(filterNames);
String selected = fd.open();
System.out.println(selected);
if (selected != null) {
// Check if its already created
DatType type = DatType.PART;
DatFile df = new DatFile(selected);
DatFile original = isFileNameAllocated2(selected, df);
if (original == null) {
// Type Check and Description Parsing!!
StringBuilder titleSb = new StringBuilder();
UTF8BufferedReader reader = null;
File f = new File(selected);
try {
reader = new UTF8BufferedReader(f.getAbsolutePath());
String title = reader.readLine();
if (title != null) {
title = title.trim();
if (title.length() > 0) {
titleSb.append(" -"); //$NON-NLS-1$
titleSb.append(title.substring(1));
}
}
while (true) {
String typ = reader.readLine();
if (typ != null) {
typ = typ.trim();
if (!typ.startsWith("0")) { //$NON-NLS-1$
break;
} else {
int i1 = typ.indexOf("!LDRAW_ORG"); //$NON-NLS-1$
if (i1 > -1) {
int i2;
i2 = typ.indexOf("Subpart"); //$NON-NLS-1$
if (i2 > -1 && i1 < i2) {
type = DatType.SUBPART;
break;
}
i2 = typ.indexOf("Part"); //$NON-NLS-1$
if (i2 > -1 && i1 < i2) {
type = DatType.PART;
break;
}
i2 = typ.indexOf("48_Primitive"); //$NON-NLS-1$
if (i2 > -1 && i1 < i2) {
type = DatType.PRIMITIVE48;
break;
}
i2 = typ.indexOf("Primitive"); //$NON-NLS-1$
if (i2 > -1 && i1 < i2) {
type = DatType.PRIMITIVE;
break;
}
}
}
} else {
break;
}
}
} catch (LDParsingException e) {
} catch (FileNotFoundException e) {
} catch (UnsupportedEncodingException e) {
} finally {
try {
if (reader != null)
reader.close();
} catch (LDParsingException e1) {
}
}
df = new DatFile(selected, titleSb.toString(), false, type);
df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
} else {
df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
if (original.isProjectFile()) {
openDatFile(df, where, null);
return df;
}
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
if (cachedReferences.contains(df)) {
openDatFile(df, where, null);
return df;
}
}
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectSubparts[0].getData();
if (cachedReferences.contains(df)) {
openDatFile(df, where, null);
return df;
}
}
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives[0].getData();
if (cachedReferences.contains(df)) {
openDatFile(df, where, null);
return df;
}
}
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives48[0].getData();
if (cachedReferences.contains(df)) {
openDatFile(df, where, null);
return df;
}
}
type = original.getType();
df = original;
}
TreeItem ti;
switch (type) {
case PART:
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
cachedReferences.add(df);
}
ti = new TreeItem(this.treeItem_ProjectParts[0], SWT.NONE);
break;
case SUBPART:
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectSubparts[0].getData();
cachedReferences.add(df);
}
ti = new TreeItem(this.treeItem_ProjectSubparts[0], SWT.NONE);
break;
case PRIMITIVE:
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives[0].getData();
cachedReferences.add(df);
}
ti = new TreeItem(this.treeItem_ProjectPrimitives[0], SWT.NONE);
break;
case PRIMITIVE48:
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives48[0].getData();
cachedReferences.add(df);
}
ti = new TreeItem(this.treeItem_ProjectPrimitives48[0], SWT.NONE);
break;
default:
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
cachedReferences.add(df);
}
ti = new TreeItem(this.treeItem_ProjectParts[0], SWT.NONE);
break;
}
StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
nameSb.append("(new file)"); //$NON-NLS-1$ I18N
ti.setText(nameSb.toString());
ti.setData(df);
updateTree_unsavedEntries();
openDatFile(df, where, null);
return df;
}
return null;
}
    /**
     * Opens a {@link DatFile} in the 3D editor, the text editor, or both.
     *
     * @param df the file to open
     * @param where which editor(s) to open the file in, see {@link OpenInWhat}
     * @param tWin the text editor window the request originates from (may be {@code null})
     * @return {@code true} only if the file was already open in a text tab belonging to
     *         {@code tWin} itself; {@code false} in every other case
     */
    public boolean openDatFile(DatFile df, OpenInWhat where, EditorTextWindow tWin) {
        if (where == OpenInWhat.EDITOR_3D || where == OpenInWhat.EDITOR_TEXT_AND_3D) {
            if (renders.isEmpty()) {
                // No 3D renderer exists yet: replace the "%EMPTY%" placeholder child of
                // the main sash form with a fresh 3D composite showing this file.
                if ("%EMPTY%".equals(Editor3DWindow.getSashForm().getChildren()[1].getData())) { //$NON-NLS-1$
                    // Preserve the sash weights across the child swap, dispose the placeholder.
                    int[] mainSashWeights = Editor3DWindow.getSashForm().getWeights();
                    Editor3DWindow.getSashForm().getChildren()[1].dispose();
                    CompositeContainer cmp_Container = new CompositeContainer(Editor3DWindow.getSashForm(), false);
                    cmp_Container.moveBelow(Editor3DWindow.getSashForm().getChildren()[0]);
                    df.parseForData();
                    final VertexManager vm = df.getVertexManager();
                    Project.setFileToEdit(df);
                    cmp_Container.getComposite3D().setLockableDatFileReference(df);
                    vm.zoomToFit(cmp_Container.getComposite3D());
                    Editor3DWindow.getSashForm().getParent().layout();
                    Editor3DWindow.getSashForm().setWeights(mainSashWeights);
                }
            } else {
                // Renderers exist: only proceed if at least one 3D view is not locked
                // to its currently displayed file.
                boolean canUpdate = false;
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (!c3d.isDatFileLockedOnDisplay()) {
                        canUpdate = true;
                        break;
                    }
                }
                if (canUpdate) {
                    final VertexManager vm = df.getVertexManager();
                    if (vm.isModified()) {
                        // NOTE(review): presumably re-serialises modified vertex data back
                        // into the file's text before re-parsing — confirm against DatFile.
                        df.setText(df.getText());
                    }
                    df.parseForData();
                    Project.setFileToEdit(df);
                    // Point every unlocked 3D view at this file and zoom it into view.
                    for (OpenGLRenderer renderer : renders) {
                        Composite3D c3d = renderer.getC3D();
                        if (!c3d.isDatFileLockedOnDisplay()) {
                            c3d.setLockableDatFileReference(df);
                            vm.zoomToFit(c3d);
                        }
                    }
                }
            }
        }
        if (where == OpenInWhat.EDITOR_TEXT || where == OpenInWhat.EDITOR_TEXT_AND_3D) {
            // If the file is already open in some text window, focus that tab instead
            // of opening it twice; report whether that window is the caller's own.
            for (EditorTextWindow w : Project.getOpenTextWindows()) {
                for (CTabItem t : w.getTabFolder().getItems()) {
                    if (df.equals(((CompositeTab) t).getState().getFileNameObj())) {
                        w.getTabFolder().setSelection(t);
                        ((CompositeTab) t).getControl().getShell().forceActive();
                        w.open();
                        return w == tWin;
                    }
                }
            }
            // Not open anywhere and no originating window given: open a brand-new one.
            if (tWin == null) {
                // Project.getParsedFiles().add(df); IS NECESSARY HERE
                Project.getParsedFiles().add(df);
                new EditorTextWindow().run(df);
            }
        }
        return false;
    }
public void disableSelectionTab() {
updatingSelectionTab = true;
txt_Line[0].setText(""); //$NON-NLS-1$
spn_SelectionX1[0].setEnabled(false);
spn_SelectionY1[0].setEnabled(false);
spn_SelectionZ1[0].setEnabled(false);
spn_SelectionX2[0].setEnabled(false);
spn_SelectionY2[0].setEnabled(false);
spn_SelectionZ2[0].setEnabled(false);
spn_SelectionX3[0].setEnabled(false);
spn_SelectionY3[0].setEnabled(false);
spn_SelectionZ3[0].setEnabled(false);
spn_SelectionX4[0].setEnabled(false);
spn_SelectionY4[0].setEnabled(false);
spn_SelectionZ4[0].setEnabled(false);
spn_SelectionX1[0].setValue(BigDecimal.ZERO);
spn_SelectionY1[0].setValue(BigDecimal.ZERO);
spn_SelectionZ1[0].setValue(BigDecimal.ZERO);
spn_SelectionX2[0].setValue(BigDecimal.ZERO);
spn_SelectionY2[0].setValue(BigDecimal.ZERO);
spn_SelectionZ2[0].setValue(BigDecimal.ZERO);
spn_SelectionX3[0].setValue(BigDecimal.ZERO);
spn_SelectionY3[0].setValue(BigDecimal.ZERO);
spn_SelectionZ3[0].setValue(BigDecimal.ZERO);
spn_SelectionX4[0].setValue(BigDecimal.ZERO);
spn_SelectionY4[0].setValue(BigDecimal.ZERO);
spn_SelectionZ4[0].setValue(BigDecimal.ZERO);
lbl_SelectionX1[0].setText(I18n.EDITOR3D_PositionX1);
lbl_SelectionY1[0].setText(I18n.EDITOR3D_PositionY1);
lbl_SelectionZ1[0].setText(I18n.EDITOR3D_PositionZ1);
lbl_SelectionX2[0].setText(I18n.EDITOR3D_PositionX2);
lbl_SelectionY2[0].setText(I18n.EDITOR3D_PositionY2);
lbl_SelectionZ2[0].setText(I18n.EDITOR3D_PositionZ2);
lbl_SelectionX3[0].setText(I18n.EDITOR3D_PositionX3);
lbl_SelectionY3[0].setText(I18n.EDITOR3D_PositionY3);
lbl_SelectionZ3[0].setText(I18n.EDITOR3D_PositionZ3);
lbl_SelectionX4[0].setText(I18n.EDITOR3D_PositionX4);
lbl_SelectionY4[0].setText(I18n.EDITOR3D_PositionY4);
lbl_SelectionZ4[0].setText(I18n.EDITOR3D_PositionZ4);
updatingSelectionTab = false;
}
    /**
     * @return the global, mutable list of active {@link OpenGLRenderer} instances
     *         (callers receive the live list, not a copy)
     */
    public static ArrayList<OpenGLRenderer> getRenders() {
        return renders;
    }
    /**
     * @return the currently registered search/replace window, or {@code null}
     *         if none has been set
     */
    public SearchWindow getSearchWindow() {
        return searchWindow;
    }
    /**
     * Registers the search/replace window for this 3D editor.
     *
     * @param searchWindow the window to register (may be {@code null} to clear it)
     */
    public void setSearchWindow(SearchWindow searchWindow) {
        this.searchWindow = searchWindow;
    }
    /**
     * Copies the checked state of the selection-filter menu items into the
     * {@code sels} ({@code SelectorSettings}) instance, so the selector operates
     * with the options currently visible in the menu.
     */
    private void loadSelectorSettings() {
        // Filter/scope options
        sels.setColour(mntm_WithSameColour[0].getSelection());
        sels.setEdgeStop(mntm_StopAtEdges[0].getSelection());
        sels.setHidden(mntm_WithHiddenData[0].getSelection());
        sels.setNoSubfiles(mntm_ExceptSubfiles[0].getSelection());
        sels.setOrientation(mntm_WithSameOrientation[0].getSelection());
        sels.setDistance(mntm_WithAccuracy[0].getSelection());
        sels.setWholeSubfiles(mntm_WithWholeSubfiles[0].getSelection());
        // Element types to include in the selection
        sels.setVertices(mntm_SVertices[0].getSelection());
        sels.setLines(mntm_SLines[0].getSelection());
        sels.setTriangles(mntm_STriangles[0].getSelection());
        sels.setQuads(mntm_SQuads[0].getSelection());
        sels.setCondlines(mntm_SCLines[0].getSelection());
    }
private boolean isFileNameAllocated(String dir, DatFile df, boolean createNew) {
TreeItem[] folders = new TreeItem[12];
folders[0] = treeItem_OfficialParts[0];
folders[1] = treeItem_OfficialPrimitives[0];
folders[2] = treeItem_OfficialPrimitives48[0];
folders[3] = treeItem_OfficialSubparts[0];
folders[4] = treeItem_UnofficialParts[0];
folders[5] = treeItem_UnofficialPrimitives[0];
folders[6] = treeItem_UnofficialPrimitives48[0];
folders[7] = treeItem_UnofficialSubparts[0];
folders[8] = treeItem_ProjectParts[0];
folders[9] = treeItem_ProjectPrimitives[0];
folders[10] = treeItem_ProjectPrimitives48[0];
folders[11] = treeItem_ProjectSubparts[0];
for (TreeItem folder : folders) {
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
for (DatFile d : cachedReferences) {
if (createNew || !df.equals(d)) {
if (dir.equals(d.getOldName()) || dir.equals(d.getNewName())) {
return true;
}
}
}
}
return false;
}
private DatFile isFileNameAllocated2(String dir, DatFile df) {
TreeItem[] folders = new TreeItem[12];
folders[0] = treeItem_OfficialParts[0];
folders[1] = treeItem_OfficialPrimitives[0];
folders[2] = treeItem_OfficialPrimitives48[0];
folders[3] = treeItem_OfficialSubparts[0];
folders[4] = treeItem_UnofficialParts[0];
folders[5] = treeItem_UnofficialPrimitives[0];
folders[6] = treeItem_UnofficialPrimitives48[0];
folders[7] = treeItem_UnofficialSubparts[0];
folders[8] = treeItem_ProjectParts[0];
folders[9] = treeItem_ProjectPrimitives[0];
folders[10] = treeItem_ProjectPrimitives48[0];
folders[11] = treeItem_ProjectSubparts[0];
for (TreeItem folder : folders) {
@SuppressWarnings("unchecked")
ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
for (DatFile d : cachedReferences) {
if (dir.equals(d.getOldName()) || dir.equals(d.getNewName())) {
return d;
}
}
}
return null;
}
}
/* MIT - License
Copyright (c) 2012 - this year, Nils Schmidt
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
package org.nschmidt.ldparteditor.shells.editor3d;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.net.URLDecoder;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.opengl.GLCanvas;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.wb.swt.SWTResourceManager;
import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.GLContext;
import org.lwjgl.util.vector.Matrix4f;
import org.lwjgl.util.vector.Vector3f;
import org.lwjgl.util.vector.Vector4f;
import org.nschmidt.ldparteditor.composites.Composite3D;
import org.nschmidt.ldparteditor.composites.CompositeContainer;
import org.nschmidt.ldparteditor.composites.ToolItem;
import org.nschmidt.ldparteditor.composites.compositetab.CompositeTab;
import org.nschmidt.ldparteditor.data.DatFile;
import org.nschmidt.ldparteditor.data.DatType;
import org.nschmidt.ldparteditor.data.GColour;
import org.nschmidt.ldparteditor.data.GData;
import org.nschmidt.ldparteditor.data.GData1;
import org.nschmidt.ldparteditor.data.GDataPNG;
import org.nschmidt.ldparteditor.data.LibraryManager;
import org.nschmidt.ldparteditor.data.Matrix;
import org.nschmidt.ldparteditor.data.ReferenceParser;
import org.nschmidt.ldparteditor.data.Vertex;
import org.nschmidt.ldparteditor.data.VertexManager;
import org.nschmidt.ldparteditor.dialogs.colour.ColourDialog;
import org.nschmidt.ldparteditor.dialogs.copy.CopyDialog;
import org.nschmidt.ldparteditor.dialogs.edger2.EdgerDialog;
import org.nschmidt.ldparteditor.dialogs.intersector.IntersectorDialog;
import org.nschmidt.ldparteditor.dialogs.isecalc.IsecalcDialog;
import org.nschmidt.ldparteditor.dialogs.lines2pattern.Lines2PatternDialog;
import org.nschmidt.ldparteditor.dialogs.newproject.NewProjectDialog;
import org.nschmidt.ldparteditor.dialogs.pathtruder.PathTruderDialog;
import org.nschmidt.ldparteditor.dialogs.rectifier.RectifierDialog;
import org.nschmidt.ldparteditor.dialogs.rotate.RotateDialog;
import org.nschmidt.ldparteditor.dialogs.round.RoundDialog;
import org.nschmidt.ldparteditor.dialogs.scale.ScaleDialog;
import org.nschmidt.ldparteditor.dialogs.setcoordinates.CoordinatesDialog;
import org.nschmidt.ldparteditor.dialogs.slicerpro.SlicerProDialog;
import org.nschmidt.ldparteditor.dialogs.symsplitter.SymSplitterDialog;
import org.nschmidt.ldparteditor.dialogs.translate.TranslateDialog;
import org.nschmidt.ldparteditor.dialogs.txt2dat.Txt2DatDialog;
import org.nschmidt.ldparteditor.dialogs.unificator.UnificatorDialog;
import org.nschmidt.ldparteditor.dialogs.value.ValueDialog;
import org.nschmidt.ldparteditor.dialogs.value.ValueDialogInt;
import org.nschmidt.ldparteditor.enums.GLPrimitives;
import org.nschmidt.ldparteditor.enums.MergeTo;
import org.nschmidt.ldparteditor.enums.MouseButton;
import org.nschmidt.ldparteditor.enums.OpenInWhat;
import org.nschmidt.ldparteditor.enums.Threshold;
import org.nschmidt.ldparteditor.enums.TransformationMode;
import org.nschmidt.ldparteditor.enums.View;
import org.nschmidt.ldparteditor.enums.WorkingMode;
import org.nschmidt.ldparteditor.helpers.Manipulator;
import org.nschmidt.ldparteditor.helpers.ShellHelper;
import org.nschmidt.ldparteditor.helpers.Version;
import org.nschmidt.ldparteditor.helpers.WidgetSelectionHelper;
import org.nschmidt.ldparteditor.helpers.composite3d.Edger2Settings;
import org.nschmidt.ldparteditor.helpers.composite3d.IntersectorSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.IsecalcSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.PathTruderSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.RectifierSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.SelectorSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.SlicerProSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.SymSplitterSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.TreeData;
import org.nschmidt.ldparteditor.helpers.composite3d.Txt2DatSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.UnificatorSettings;
import org.nschmidt.ldparteditor.helpers.composite3d.ViewIdleManager;
import org.nschmidt.ldparteditor.helpers.compositetext.ProjectActions;
import org.nschmidt.ldparteditor.helpers.compositetext.SubfileCompiler;
import org.nschmidt.ldparteditor.helpers.math.MathHelper;
import org.nschmidt.ldparteditor.helpers.math.Vector3d;
import org.nschmidt.ldparteditor.i18n.I18n;
import org.nschmidt.ldparteditor.logger.NLogger;
import org.nschmidt.ldparteditor.main.LDPartEditor;
import org.nschmidt.ldparteditor.opengl.OpenGLRenderer;
import org.nschmidt.ldparteditor.project.Project;
import org.nschmidt.ldparteditor.resources.ResourceManager;
import org.nschmidt.ldparteditor.shells.editormeta.EditorMetaWindow;
import org.nschmidt.ldparteditor.shells.editortext.EditorTextWindow;
import org.nschmidt.ldparteditor.shells.searchnreplace.SearchWindow;
import org.nschmidt.ldparteditor.text.LDParsingException;
import org.nschmidt.ldparteditor.text.References;
import org.nschmidt.ldparteditor.text.TextTriangulator;
import org.nschmidt.ldparteditor.text.UTF8BufferedReader;
import org.nschmidt.ldparteditor.widgets.BigDecimalSpinner;
import org.nschmidt.ldparteditor.widgets.TreeItem;
import org.nschmidt.ldparteditor.widgets.ValueChangeAdapter;
import org.nschmidt.ldparteditor.workbench.Editor3DWindowState;
import org.nschmidt.ldparteditor.workbench.WorkbenchManager;
/**
* The 3D editor window
* <p>
* Note: This class should be instantiated once, it defines all listeners and
* part of the business logic.
*
* @author nils
*
*/
public class Editor3DWindow extends Editor3DDesign {
/** The window state of this window */
private Editor3DWindowState editor3DWindowState;
/** The reference to this window */
private static Editor3DWindow window;
/** The window state of this window */
private SearchWindow searchWindow;
public static final ArrayList<GLCanvas> canvasList = new ArrayList<GLCanvas>();
public static final ArrayList<OpenGLRenderer> renders = new ArrayList<OpenGLRenderer>();
private boolean addingSomething = false;
private boolean addingVertices = false;
private boolean addingLines = false;
private boolean addingTriangles = false;
private boolean addingQuads = false;
private boolean addingCondlines = false;
private boolean addingSubfiles = false;
private boolean movingAdjacentData = false;
private boolean noTransparentSelection = false;
private boolean bfcToggle = false;
private int workingType = WorkingMode.VERTICES;
private int workingAction = WorkingMode.SELECT;
private GColour lastUsedColour = new GColour(16, .5f, .5f, .5f, 1f);
private int transformationMode = WorkingMode.LOCAL;
private int snapSize = 1;
private Txt2DatSettings ts = new Txt2DatSettings();
private Edger2Settings es = new Edger2Settings();
private RectifierSettings rs = new RectifierSettings();
private IsecalcSettings is = new IsecalcSettings();
private SlicerProSettings ss = new SlicerProSettings();
private IntersectorSettings ins = new IntersectorSettings();
private PathTruderSettings ps = new PathTruderSettings();
private SymSplitterSettings sims = new SymSplitterSettings();
private UnificatorSettings us = new UnificatorSettings();
private SelectorSettings sels = new SelectorSettings();
private boolean updatingPngPictureTab;
private int pngPictureUpdateCounter = 0;
private final EditorMetaWindow metaWindow = new EditorMetaWindow();
private boolean updatingSelectionTab = true;
    /**
     * Creates the application window and installs a self-rescheduling render loop
     * on the SWT display thread: each tick draws at most one canvas from
     * {@code canvasList} (round-robin), prunes disposed canvases, and re-arms
     * itself via {@code Display.timerExec}.
     */
    public Editor3DWindow() {
        super();
        // Round-robin index into canvasList; boxed in an array so the anonymous
        // Runnable below can mutate it.
        final int[] i = new int[1];
        final GLCanvas[] first1 = ViewIdleManager.firstCanvas;
        final OpenGLRenderer[] first2 = ViewIdleManager.firstRender;
        // Delay (ms) until the next tick; normally 10, stretched to 500 once
        // after a pause request.
        final int[] intervall = new int[] { 10 };
        Display.getCurrent().asyncExec(new Runnable() {
            @Override
            public void run() {
                if (ViewIdleManager.pause[0].get()) {
                    // Pause requested: consume the flag and back off for 500 ms.
                    ViewIdleManager.pause[0].set(false);
                    intervall[0] = 500;
                } else {
                    final int cs = canvasList.size();
                    if (i[0] < cs && cs > 0) {
                        GLCanvas canvas;
                        // Give the "first" canvas (registered in ViewIdleManager) one
                        // extra draw before clearing that registration.
                        if (!canvasList.get(i[0]).equals(first1[0])) {
                            canvas = first1[0];
                            if (canvas != null && !canvas.isDisposed()) {
                                first2[0].drawScene();
                                first1[0] = null;
                                first2[0] = null;
                            }
                        }
                        canvas = canvasList.get(i[0]);
                        if (!canvas.isDisposed()) {
                            // Render mode 5 is skipped here — NOTE(review): meaning of
                            // mode 5 not visible in this file; confirm in Composite3D.
                            if (renders.get(i[0]).getC3D().getRenderMode() != 5) {
                                renders.get(i[0]).drawScene();
                            }
                        } else {
                            // Drop disposed canvases together with their renderers.
                            canvasList.remove(i[0]);
                            renders.remove(i[0]);
                        }
                        i[0]++;
                    } else {
                        // Wrap around to the first canvas.
                        i[0] = 0;
                    }
                }
                // Re-arm this runnable, then reset the delay to the default 10 ms.
                Display.getCurrent().timerExec(intervall[0], this);
                intervall[0] = 10;
            }
        });
    }
/**
* Run a fresh instance of this window
*/
public void run() {
window = this;
// Load the window state data
editor3DWindowState = WorkbenchManager.getEditor3DWindowState();
WorkbenchManager.setEditor3DWindow(this);
// Closing this window causes the whole application to quit
this.setBlockOnOpen(true);
// Creating the window to get the shell
this.create();
final Shell sh = this.getShell();
sh.setText(Version.getApplicationName());
sh.setImage(ResourceManager.getImage("imgDuke2.png")); //$NON-NLS-1$
sh.setMinimumSize(640, 480);
sh.setBounds(this.editor3DWindowState.getWindowState().getSizeAndPosition());
if (this.editor3DWindowState.getWindowState().isCentered()) {
ShellHelper.centerShellOnPrimaryScreen(sh);
}
// Maximize has to be called asynchronously
sh.getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
sh.setMaximized(editor3DWindowState.getWindowState().isMaximized());
}
});
// MARK All final listeners will be configured here..
// First, create all menu actions.
createActions();
btn_Sync[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetSearch();
int[][] stats = new int[13][3];
stats[0] = LibraryManager.syncProjectElements(treeItem_Project[0]);
stats[5] = LibraryManager.syncUnofficialParts(treeItem_UnofficialParts[0]);
stats[6] = LibraryManager.syncUnofficialSubparts(treeItem_UnofficialSubparts[0]);
stats[7] = LibraryManager.syncUnofficialPrimitives(treeItem_UnofficialPrimitives[0]);
stats[8] = LibraryManager.syncUnofficialHiResPrimitives(treeItem_UnofficialPrimitives48[0]);
stats[9] = LibraryManager.syncOfficialParts(treeItem_OfficialParts[0]);
stats[10] = LibraryManager.syncOfficialSubparts(treeItem_OfficialSubparts[0]);
stats[11] = LibraryManager.syncOfficialPrimitives(treeItem_OfficialPrimitives[0]);
stats[12] = LibraryManager.syncOfficialHiResPrimitives(treeItem_OfficialPrimitives48[0]);
int additions = 0;
int deletions = 0;
int conflicts = 0;
for (int[] is : stats) {
additions += is[0];
deletions += is[1];
conflicts += is[2];
}
txt_Search[0].setText(" "); //$NON-NLS-1$
txt_Search[0].setText(""); //$NON-NLS-1$
Set<DatFile> dfs = new HashSet<DatFile>();
for (OpenGLRenderer renderer : renders) {
dfs.add(renderer.getC3D().getLockableDatFileReference());
}
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
DatFile txtDat = ((CompositeTab) t).getState().getFileNameObj();
if (txtDat != null) {
dfs.add(txtDat);
}
}
}
for (DatFile df : dfs) {
SubfileCompiler.compile(df);
}
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
DatFile txtDat = ((CompositeTab) t).getState().getFileNameObj();
if (txtDat != null) {
((CompositeTab) t).parseForError();
((CompositeTab) t).getTextComposite().redraw();
((CompositeTab) t).getState().getTab().setText(((CompositeTab) t).getState().getFilenameWithStar());
}
}
}
updateTree_unsavedEntries();
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBox.setText(I18n.DIALOG_SyncTitle);
Object[] messageArguments = {additions, deletions, conflicts};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_Sync);
messageBox.setMessage(formatter.format(messageArguments));
messageBox.open();
}
});
btn_New[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
ProjectActions.createNewProject(Editor3DWindow.getWindow(), false);
}
});
btn_Open[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (ProjectActions.openProject()) {
Project.create(false);
treeItem_Project[0].setData(Project.getProjectPath());
resetSearch();
LibraryManager.readProjectPartsParent(treeItem_ProjectParts[0]);
LibraryManager.readProjectParts(treeItem_ProjectParts[0]);
LibraryManager.readProjectSubparts(treeItem_ProjectSubparts[0]);
LibraryManager.readProjectPrimitives(treeItem_ProjectPrimitives[0]);
LibraryManager.readProjectHiResPrimitives(treeItem_ProjectPrimitives48[0]);
treeItem_OfficialParts[0].setData(null);
txt_Search[0].setText(" "); //$NON-NLS-1$
txt_Search[0].setText(""); //$NON-NLS-1$
updateTree_unsavedEntries();
}
}
});
btn_Save[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1) {
if (treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
if (!df.isReadOnly() && Project.getUnsavedFiles().contains(df)) {
if (df.save()) {
Editor3DWindow.getWindow().updateTree_unsavedEntries();
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
messageBoxError.open();
Editor3DWindow.getWindow().updateTree_unsavedEntries();
}
}
} else if (treeParts[0].getSelection()[0].getData() instanceof ArrayList<?>) {
NLogger.debug(getClass(), "Saving all files from this group"); //$NON-NLS-1$
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> dfs = (ArrayList<DatFile>) treeParts[0].getSelection()[0].getData();
for (DatFile df : dfs) {
if (!df.isReadOnly() && Project.getUnsavedFiles().contains(df)) {
if (df.save()) {
Project.removeUnsavedFile(df);
Editor3DWindow.getWindow().updateTree_unsavedEntries();
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
messageBoxError.open();
Editor3DWindow.getWindow().updateTree_unsavedEntries();
}
}
}
}
} else if (treeParts[0].getSelection()[0].getData() instanceof String) {
if (treeParts[0].getSelection()[0].equals(treeItem_Project[0])) {
NLogger.debug(getClass(), "Save the project..."); //$NON-NLS-1$
if (Project.isDefaultProject()) {
ProjectActions.createNewProject(Editor3DWindow.getWindow(), true);
}
iterateOverItems(treeItem_ProjectParts[0]);
iterateOverItems(treeItem_ProjectSubparts[0]);
iterateOverItems(treeItem_ProjectPrimitives[0]);
iterateOverItems(treeItem_ProjectPrimitives48[0]);
} else if (treeParts[0].getSelection()[0].equals(treeItem_Unofficial[0])) {
iterateOverItems(treeItem_UnofficialParts[0]);
iterateOverItems(treeItem_UnofficialSubparts[0]);
iterateOverItems(treeItem_UnofficialPrimitives[0]);
iterateOverItems(treeItem_UnofficialPrimitives48[0]);
}
NLogger.debug(getClass(), "Saving all files from this group to"); //$NON-NLS-1$
NLogger.debug(getClass(), (String) treeParts[0].getSelection()[0].getData());
}
} else {
NLogger.debug(getClass(), "Save the project..."); //$NON-NLS-1$
if (Project.isDefaultProject()) {
ProjectActions.createNewProject(Editor3DWindow.getWindow(), true);
}
}
}
private void iterateOverItems(TreeItem ti) {
{
@SuppressWarnings("unchecked")
ArrayList<DatFile> dfs = (ArrayList<DatFile>) ti.getData();
for (DatFile df : dfs) {
if (!df.isReadOnly() && Project.getUnsavedFiles().contains(df)) {
if (df.save()) {
Project.removeUnsavedFile(df);
Editor3DWindow.getWindow().updateTree_unsavedEntries();
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
messageBoxError.open();
Editor3DWindow.getWindow().updateTree_unsavedEntries();
}
}
}
}
}
});
btn_SaveAll[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
HashSet<DatFile> dfs = new HashSet<DatFile>(Project.getUnsavedFiles());
for (DatFile df : dfs) {
if (!df.isReadOnly()) {
if (df.save()) {
Project.removeUnsavedFile(df);
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
messageBoxError.setText(I18n.DIALOG_Error);
messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
messageBoxError.open();
}
}
}
if (Project.isDefaultProject()) {
ProjectActions.createNewProject(getWindow(), true);
}
Editor3DWindow.getWindow().updateTree_unsavedEntries();
}
});
btn_NewDat[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
createNewDatFile(getShell(), OpenInWhat.EDITOR_TEXT_AND_3D);
}
});
btn_OpenDat[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
openDatFile(getShell(), OpenInWhat.EDITOR_TEXT_AND_3D);
}
});
btn_Select[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Select[0]);
workingAction = WorkingMode.SELECT;
}
});
btn_Move[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Move[0]);
workingAction = WorkingMode.MOVE;
}
});
btn_Rotate[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Rotate[0]);
workingAction = WorkingMode.ROTATE;
}
});
btn_Scale[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Scale[0]);
workingAction = WorkingMode.SCALE;
}
});
btn_Combined[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Combined[0]);
workingAction = WorkingMode.COMBINED;
}
});
btn_Local[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Local[0]);
transformationMode = WorkingMode.LOCAL;
}
});
btn_Global[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Global[0]);
transformationMode = WorkingMode.GLOBAL;
}
});
btn_Vertices[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Vertices[0]);
setWorkingType(WorkingMode.VERTICES);
}
});
btn_TrisNQuads[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_TrisNQuads[0]);
setWorkingType(WorkingMode.FACES);
}
});
btn_Lines[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickBtnTest(btn_Lines[0]);
setWorkingType(WorkingMode.LINES);
}
});
btn_Subfiles[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
clickBtnTest(btn_Subfiles[0]);
setWorkingType(WorkingMode.SUBFILES);
}
}
});
btn_AddComment[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (!metaWindow.isOpened()) {
metaWindow.run();
} else {
metaWindow.open();
}
}
});
btn_AddVertex[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetAddState();
clickSingleBtn(btn_AddVertex[0]);
setAddingVertices(btn_AddVertex[0].getSelection());
setAddingSomething(isAddingVertices());
}
});
btn_AddPrimitive[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetAddState();
setAddingSubfiles(btn_AddPrimitive[0].getSelection());
setAddingSomething(isAddingSubfiles());
clickSingleBtn(btn_AddPrimitive[0]);
}
});
btn_AddLine[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetAddState();
setAddingLines(btn_AddLine[0].getSelection());
setAddingSomething(isAddingLines());
clickSingleBtn(btn_AddLine[0]);
}
});
btn_AddTriangle[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetAddState();
setAddingTriangles(btn_AddTriangle[0].getSelection());
setAddingSomething(isAddingTriangles());
clickSingleBtn(btn_AddTriangle[0]);
}
});
btn_AddQuad[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetAddState();
setAddingQuads(btn_AddQuad[0].getSelection());
setAddingSomething(isAddingQuads());
clickSingleBtn(btn_AddQuad[0]);
}
});
btn_AddCondline[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
resetAddState();
setAddingCondlines(btn_AddCondline[0].getSelection());
setAddingSomething(isAddingCondlines());
clickSingleBtn(btn_AddCondline[0]);
}
});
btn_MoveAdjacentData[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
clickSingleBtn(btn_MoveAdjacentData[0]);
setMovingAdjacentData(btn_MoveAdjacentData[0].getSelection());
}
});
btn_CompileSubfile[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
SubfileCompiler.compile(Project.getFileToEdit());
}
}
});
btn_lineSize1[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
GLPrimitives.SPHERE = GLPrimitives.SPHERE1;
GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV1;
View.lineWidth1000[0] = 25f;
View.lineWidth[0] = .025f;
View.lineWidthGL[0] = .375f;
Set<DatFile> dfs = new HashSet<DatFile>();
for (OpenGLRenderer renderer : renders) {
dfs.add(renderer.getC3D().getLockableDatFileReference());
}
for (DatFile df : dfs) {
SubfileCompiler.compile(df);
}
clickSingleBtn(btn_lineSize1[0]);
}
});
btn_lineSize2[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
GLPrimitives.SPHERE = GLPrimitives.SPHERE2;
GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV2;
View.lineWidth1000[0] = 50f;
View.lineWidth[0] = .050f;
View.lineWidthGL[0] = .75f;
Set<DatFile> dfs = new HashSet<DatFile>();
for (OpenGLRenderer renderer : renders) {
dfs.add(renderer.getC3D().getLockableDatFileReference());
}
for (DatFile df : dfs) {
SubfileCompiler.compile(df);
}
clickSingleBtn(btn_lineSize2[0]);
}
});
btn_lineSize3[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
GLPrimitives.SPHERE = GLPrimitives.SPHERE3;
GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV3;
View.lineWidth1000[0] = 100f;
View.lineWidth[0] = .100f;
View.lineWidthGL[0] = 1.5f;
Set<DatFile> dfs = new HashSet<DatFile>();
for (OpenGLRenderer renderer : renders) {
dfs.add(renderer.getC3D().getLockableDatFileReference());
}
for (DatFile df : dfs) {
SubfileCompiler.compile(df);
}
clickSingleBtn(btn_lineSize3[0]);
}
});
// Activates line-width preset 4 (thickest): swaps in the matching sphere
// primitives, updates the global width values and recompiles every file
// that is currently shown in a 3D view.
btn_lineSize4[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        GLPrimitives.SPHERE = GLPrimitives.SPHERE4;
        GLPrimitives.SPHERE_INV = GLPrimitives.SPHERE_INV4;
        View.lineWidth1000[0] = 200f;
        View.lineWidth[0] = .200f;
        View.lineWidthGL[0] = 3f;
        // Collect the distinct files of all renderers first, so a file shown
        // in several views is only compiled once.
        final Set<DatFile> filesToCompile = new HashSet<DatFile>();
        for (OpenGLRenderer r : renders) {
            filesToCompile.add(r.getC3D().getLockableDatFileReference());
        }
        for (DatFile file : filesToCompile) {
            SubfileCompiler.compile(file);
        }
        clickSingleBtn(btn_lineSize4[0]);
    }
});
// Inverts the winding (BFC orientation) of the current selection in the
// file that is open for editing.
btn_BFCswap[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        final DatFile fileToEdit = Project.getFileToEdit();
        if (fileToEdit != null) {
            fileToEdit.getVertexManager().windingChangeSelection();
        }
    }
});
// Rounds the coordinates of the current selection to the configured
// precision. Holding CTRL while clicking first opens the dialog for
// changing the rounding precision settings.
btn_RoundSelection[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            // CTRL + click: let the user adjust the precision before rounding.
            if ((e.stateMask & SWT.CTRL) == SWT.CTRL) {
                new RoundDialog(getShell()).open();
            }
            Project.getFileToEdit().getVertexManager()
            .roundSelection(WorkbenchManager.getUserSettingState().getCoordsPrecision(), WorkbenchManager.getUserSettingState().getTransMatrixPrecision(), isMovingAdjacentData(), true);
        }
    }
});
// Pipette: picks a colour from the current selection, stores it as the
// "last used" colour and rebinds the last-used-colour button (its paint
// listener, selection listener and tooltip) to the picked colour.
btn_Pipette[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            VertexManager vm = Project.getFileToEdit().getVertexManager();
            final GColour gColour2 = vm.getRandomSelectedColour(lastUsedColour);
            setLastUsedColour(gColour2);
            // Drop the previous paint/selection listeners before installing
            // the ones for the newly picked colour.
            btn_LastUsedColour[0].removeListener(SWT.Paint, btn_LastUsedColour[0].getListeners(SWT.Paint)[0]);
            btn_LastUsedColour[0].removeListener(SWT.Selection, btn_LastUsedColour[0].getListeners(SWT.Selection)[0]);
            final Color col = SWTResourceManager.getColor((int) (gColour2.getR() * 255f), (int) (gColour2.getG() * 255f), (int) (gColour2.getB() * 255f));
            final Point size = btn_LastUsedColour[0].computeSize(SWT.DEFAULT, SWT.DEFAULT);
            final int x = size.x / 4;
            final int y = size.y / 4;
            final int w = size.x / 2;
            final int h = size.y / 2;
            int num = gColour2.getColourNumber();
            // FIX: treat colour numbers without an LDConfig entry as direct
            // colours (num = -1), consistent with the inner selection listener
            // below and with the btn_Palette handler; previously the tooltip
            // branch could call View.getLDConfigColourName() with an unknown
            // colour number.
            if (!View.hasLDConfigColour(num)) {
                num = -1;
            }
            btn_LastUsedColour[0].addPaintListener(new PaintListener() {
                @Override
                public void paintControl(PaintEvent e) {
                    e.gc.setBackground(col);
                    e.gc.fillRectangle(x, y, w, h);
                    // NOTE(review): the icon choice looks swapped (opaque
                    // colours get the "transparent" overlay) — but it matches
                    // the btn_Palette handler; confirm before changing.
                    if (gColour2.getA() == 1f) {
                        e.gc.drawImage(ResourceManager.getImage("icon16_transparent.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                    } else {
                        e.gc.drawImage(ResourceManager.getImage("icon16_halftrans.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                    }
                }
            });
            btn_LastUsedColour[0].addSelectionListener(new SelectionListener() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    if (Project.getFileToEdit() != null) {
                        int num = gColour2.getColourNumber();
                        if (!View.hasLDConfigColour(num)) {
                            num = -1;
                        }
                        Project.getFileToEdit().getVertexManager().colourChangeSelection(num, gColour2.getR(), gColour2.getG(), gColour2.getB(), gColour2.getA());
                    }
                }
                @Override
                public void widgetDefaultSelected(SelectionEvent e) {
                }
            });
            // Tooltip: LDConfig name for known colour numbers, hex direct
            // colour notation (0x2RRGGBB) otherwise.
            if (num != -1) {
                btn_LastUsedColour[0].setToolTipText("Colour [" + num + "]: " + View.getLDConfigColourName(num)); //$NON-NLS-1$ //$NON-NLS-2$ I18N
            } else {
                StringBuilder colourBuilder = new StringBuilder();
                colourBuilder.append("0x2"); //$NON-NLS-1$
                colourBuilder.append(MathHelper.toHex((int) (255f * gColour2.getR())).toUpperCase());
                colourBuilder.append(MathHelper.toHex((int) (255f * gColour2.getG())).toUpperCase());
                colourBuilder.append(MathHelper.toHex((int) (255f * gColour2.getB())).toUpperCase());
                btn_LastUsedColour[0].setToolTipText("Colour [" + colourBuilder.toString() + "]"); //$NON-NLS-1$ //$NON-NLS-2$ I18N
            }
            btn_LastUsedColour[0].redraw();
        }
    }
});
// Palette: opens the colour chooser dialog, applies the chosen colour to
// the current selection and rebinds the last-used-colour button (paint
// listener, selection listener and tooltip) to the chosen colour.
btn_Palette[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        if (Project.getFileToEdit() != null) {
            // One-element array so the dialog can write its result back.
            final GColour[] gColour2 = new GColour[1];
            new ColourDialog(getShell(), gColour2).open();
            if (gColour2[0] != null) {
                setLastUsedColour(gColour2[0]);
                // Colour numbers without an LDConfig entry are treated as
                // direct colours (num = -1).
                int num = gColour2[0].getColourNumber();
                if (!View.hasLDConfigColour(num)) {
                    num = -1;
                }
                Project.getFileToEdit().getVertexManager().colourChangeSelection(num, gColour2[0].getR(), gColour2[0].getG(), gColour2[0].getB(), gColour2[0].getA());
                // Drop the previous paint/selection listeners before
                // installing the ones for the newly chosen colour.
                btn_LastUsedColour[0].removeListener(SWT.Paint, btn_LastUsedColour[0].getListeners(SWT.Paint)[0]);
                btn_LastUsedColour[0].removeListener(SWT.Selection, btn_LastUsedColour[0].getListeners(SWT.Selection)[0]);
                final Color col = SWTResourceManager.getColor((int) (gColour2[0].getR() * 255f), (int) (gColour2[0].getG() * 255f), (int) (gColour2[0].getB() * 255f));
                final Point size = btn_LastUsedColour[0].computeSize(SWT.DEFAULT, SWT.DEFAULT);
                final int x = size.x / 4;
                final int y = size.y / 4;
                final int w = size.x / 2;
                final int h = size.y / 2;
                btn_LastUsedColour[0].addPaintListener(new PaintListener() {
                    @Override
                    public void paintControl(PaintEvent e) {
                        e.gc.setBackground(col);
                        e.gc.fillRectangle(x, y, w, h);
                        if (gColour2[0].getA() == 1f) {
                            e.gc.drawImage(ResourceManager.getImage("icon16_transparent.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                        } else {
                            e.gc.drawImage(ResourceManager.getImage("icon16_halftrans.png"), 0, 0, 16, 16, x, y, w, h); //$NON-NLS-1$
                        }
                    }
                });
                btn_LastUsedColour[0].addSelectionListener(new SelectionListener() {
                    @Override
                    public void widgetSelected(SelectionEvent e) {
                        if (Project.getFileToEdit() != null) {
                            int num = gColour2[0].getColourNumber();
                            if (!View.hasLDConfigColour(num)) {
                                num = -1;
                            }
                            Project.getFileToEdit().getVertexManager().colourChangeSelection(num, gColour2[0].getR(), gColour2[0].getG(), gColour2[0].getB(), gColour2[0].getA());
                        }
                    }
                    @Override
                    public void widgetDefaultSelected(SelectionEvent e) {
                    }
                });
                // Tooltip: LDConfig name for known colour numbers, hex direct
                // colour notation (0x2RRGGBB) otherwise.
                if (num != -1) {
                    btn_LastUsedColour[0].setToolTipText("Colour [" + num + "]: " + View.getLDConfigColourName(num)); //$NON-NLS-1$ //$NON-NLS-2$ I18N
                } else {
                    StringBuilder colourBuilder = new StringBuilder();
                    colourBuilder.append("0x2"); //$NON-NLS-1$
                    colourBuilder.append(MathHelper.toHex((int) (255f * gColour2[0].getR())).toUpperCase());
                    colourBuilder.append(MathHelper.toHex((int) (255f * gColour2[0].getG())).toUpperCase());
                    colourBuilder.append(MathHelper.toHex((int) (255f * gColour2[0].getB())).toUpperCase());
                    btn_LastUsedColour[0].setToolTipText("Colour [" + colourBuilder.toString() + "]"); //$NON-NLS-1$ //$NON-NLS-2$ I18N
                }
                btn_LastUsedColour[0].redraw();
            }
        }
    }
});
// Selects the coarse snapping preset and applies its move/rotate/scale
// snap values to the spinners and the manipulator.
btn_Coarse[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        final BigDecimal move = WorkbenchManager.getUserSettingState().getCoarse_move_snap();
        final BigDecimal rotate = WorkbenchManager.getUserSettingState().getCoarse_rotate_snap();
        final BigDecimal scale = WorkbenchManager.getUserSettingState().getCoarse_scale_snap();
        snapSize = 2;
        spn_Move[0].setValue(move);
        spn_Rotate[0].setValue(rotate);
        spn_Scale[0].setValue(scale);
        Manipulator.setSnap(move, rotate, scale);
    }
});
// Selects the medium snapping preset and applies its move/rotate/scale
// snap values to the spinners and the manipulator.
btn_Medium[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        final BigDecimal move = WorkbenchManager.getUserSettingState().getMedium_move_snap();
        final BigDecimal rotate = WorkbenchManager.getUserSettingState().getMedium_rotate_snap();
        final BigDecimal scale = WorkbenchManager.getUserSettingState().getMedium_scale_snap();
        snapSize = 1;
        spn_Move[0].setValue(move);
        spn_Rotate[0].setValue(rotate);
        spn_Scale[0].setValue(scale);
        Manipulator.setSnap(move, rotate, scale);
    }
});
// Selects the fine snapping preset and applies its move/rotate/scale
// snap values to the spinners and the manipulator.
btn_Fine[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        final BigDecimal move = WorkbenchManager.getUserSettingState().getFine_move_snap();
        final BigDecimal rotate = WorkbenchManager.getUserSettingState().getFine_rotate_snap();
        final BigDecimal scale = WorkbenchManager.getUserSettingState().getFine_scale_snap();
        snapSize = 0;
        spn_Move[0].setValue(move);
        spn_Rotate[0].setValue(rotate);
        spn_Scale[0].setValue(scale);
        Manipulator.setSnap(move, rotate, scale);
    }
});
// Splits the selected quads into triangles; read-only files are skipped.
btn_SplitQuad[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        final DatFile fileToEdit = Project.getFileToEdit();
        if (fileToEdit != null && !fileToEdit.isReadOnly()) {
            fileToEdit.getVertexManager().splitQuads(true);
        }
    }
});
// Persists a manually entered move-snap value into the active preset
// (snapSize: 0 = fine, 2 = coarse, otherwise medium) and re-applies the
// full (move, rotate, scale) snap triplet to the manipulator.
spn_Move[0].addValueChangeListener(new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        final BigDecimal m = spn.getValue();
        final BigDecimal r;
        final BigDecimal s;
        if (snapSize == 0) {
            // Fine preset: store the new move snap, keep rotate/scale.
            WorkbenchManager.getUserSettingState().setFine_move_snap(m);
            r = WorkbenchManager.getUserSettingState().getFine_rotate_snap();
            s = WorkbenchManager.getUserSettingState().getFine_scale_snap();
        } else if (snapSize == 2) {
            // Coarse preset.
            WorkbenchManager.getUserSettingState().setCoarse_move_snap(m);
            r = WorkbenchManager.getUserSettingState().getCoarse_rotate_snap();
            s = WorkbenchManager.getUserSettingState().getCoarse_scale_snap();
        } else {
            // Medium preset (default).
            WorkbenchManager.getUserSettingState().setMedium_move_snap(m);
            r = WorkbenchManager.getUserSettingState().getMedium_rotate_snap();
            s = WorkbenchManager.getUserSettingState().getMedium_scale_snap();
        }
        Manipulator.setSnap(m, r, s);
    }
});
// Persists a manually entered rotate-snap value into the active preset
// (snapSize: 0 = fine, 2 = coarse, otherwise medium) and re-applies the
// full (move, rotate, scale) snap triplet to the manipulator.
spn_Rotate[0].addValueChangeListener(new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        final BigDecimal r = spn.getValue();
        final BigDecimal m;
        final BigDecimal s;
        if (snapSize == 0) {
            // Fine preset: store the new rotate snap, keep move/scale.
            m = WorkbenchManager.getUserSettingState().getFine_move_snap();
            WorkbenchManager.getUserSettingState().setFine_rotate_snap(r);
            s = WorkbenchManager.getUserSettingState().getFine_scale_snap();
        } else if (snapSize == 2) {
            // Coarse preset.
            m = WorkbenchManager.getUserSettingState().getCoarse_move_snap();
            WorkbenchManager.getUserSettingState().setCoarse_rotate_snap(r);
            s = WorkbenchManager.getUserSettingState().getCoarse_scale_snap();
        } else {
            // Medium preset (default).
            m = WorkbenchManager.getUserSettingState().getMedium_move_snap();
            WorkbenchManager.getUserSettingState().setMedium_rotate_snap(r);
            s = WorkbenchManager.getUserSettingState().getMedium_scale_snap();
        }
        Manipulator.setSnap(m, r, s);
    }
});
// Persists a manually entered scale-snap value into the active preset
// (snapSize: 0 = fine, 2 = coarse, otherwise medium) and re-applies the
// full (move, rotate, scale) snap triplet to the manipulator.
spn_Scale[0].addValueChangeListener(new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        final BigDecimal s = spn.getValue();
        final BigDecimal m;
        final BigDecimal r;
        if (snapSize == 0) {
            // Fine preset: store the new scale snap, keep move/rotate.
            m = WorkbenchManager.getUserSettingState().getFine_move_snap();
            r = WorkbenchManager.getUserSettingState().getFine_rotate_snap();
            WorkbenchManager.getUserSettingState().setFine_scale_snap(s);
        } else if (snapSize == 2) {
            // Coarse preset.
            m = WorkbenchManager.getUserSettingState().getCoarse_move_snap();
            r = WorkbenchManager.getUserSettingState().getCoarse_rotate_snap();
            WorkbenchManager.getUserSettingState().setCoarse_scale_snap(s);
        } else {
            // Medium preset (default).
            m = WorkbenchManager.getUserSettingState().getMedium_move_snap();
            r = WorkbenchManager.getUserSettingState().getMedium_rotate_snap();
            WorkbenchManager.getUserSettingState().setMedium_scale_snap(s);
        }
        Manipulator.setSnap(m, r, s);
    }
});
// Steps the selection index backwards (with wrap-around) until it hits an
// element of line type 2-5 that is not inside a subfile, then fills the
// selection tab spinners with that element's coordinates. If a full cycle
// finds no usable element, the loop terminates via breakIt.
btn_PreviousSelection[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        // Suppress the spinner value-change listeners while the tab is
        // populated programmatically.
        updatingSelectionTab = true;
        NLogger.debug(getClass(), "Previous Selection..."); //$NON-NLS-1$
        final DatFile df = Project.getFileToEdit();
        if (df != null) {
            final VertexManager vm = df.getVertexManager();
            final int count = vm.getSelectedData().size();
            if (count > 0) {
                boolean breakIt = false;
                boolean firstRun = true;
                while (true) {
                    int index = vm.getSelectedItemIndex();
                    index--;
                    if (index < 0) {
                        // Wrap to the last element; after the first wrap the
                        // whole selection has been visited once.
                        index = count - 1;
                        if (!firstRun) breakIt = true;
                    }
                    firstRun = false;
                    vm.setSelectedItemIndex(index);
                    final GData gdata = (GData) vm.getSelectedData().toArray()[index];
                    if (vm.isNotInSubfileAndLinetype2to5(gdata)) {
                        vm.setSelectedLine(gdata);
                        disableSelectionTab();
                        updatingSelectionTab = true;
                        // Intentional fall-through: type 5/4 enables the 4th
                        // vertex spinners, then 3 enables the 3rd, then 2
                        // enables the 1st and 2nd.
                        switch (gdata.type()) {
                        case 5:
                        case 4:
                            spn_SelectionX4[0].setEnabled(true);
                            spn_SelectionY4[0].setEnabled(true);
                            spn_SelectionZ4[0].setEnabled(true);
                        case 3:
                            spn_SelectionX3[0].setEnabled(true);
                            spn_SelectionY3[0].setEnabled(true);
                            spn_SelectionZ3[0].setEnabled(true);
                        case 2:
                            spn_SelectionX1[0].setEnabled(true);
                            spn_SelectionY1[0].setEnabled(true);
                            spn_SelectionZ1[0].setEnabled(true);
                            spn_SelectionX2[0].setEnabled(true);
                            spn_SelectionY2[0].setEnabled(true);
                            spn_SelectionZ2[0].setEnabled(true);
                            txt_Line[0].setText(gdata.toString());
                            breakIt = true;
                            // Fill the spinners with the element's precise
                            // coordinates (count depends on the line type).
                            switch (gdata.type()) {
                            case 5:
                                BigDecimal[] g5 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g5[0]);
                                spn_SelectionY1[0].setValue(g5[1]);
                                spn_SelectionZ1[0].setValue(g5[2]);
                                spn_SelectionX2[0].setValue(g5[3]);
                                spn_SelectionY2[0].setValue(g5[4]);
                                spn_SelectionZ2[0].setValue(g5[5]);
                                spn_SelectionX3[0].setValue(g5[6]);
                                spn_SelectionY3[0].setValue(g5[7]);
                                spn_SelectionZ3[0].setValue(g5[8]);
                                spn_SelectionX4[0].setValue(g5[9]);
                                spn_SelectionY4[0].setValue(g5[10]);
                                spn_SelectionZ4[0].setValue(g5[11]);
                                break;
                            case 4:
                                BigDecimal[] g4 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g4[0]);
                                spn_SelectionY1[0].setValue(g4[1]);
                                spn_SelectionZ1[0].setValue(g4[2]);
                                spn_SelectionX2[0].setValue(g4[3]);
                                spn_SelectionY2[0].setValue(g4[4]);
                                spn_SelectionZ2[0].setValue(g4[5]);
                                spn_SelectionX3[0].setValue(g4[6]);
                                spn_SelectionY3[0].setValue(g4[7]);
                                spn_SelectionZ3[0].setValue(g4[8]);
                                spn_SelectionX4[0].setValue(g4[9]);
                                spn_SelectionY4[0].setValue(g4[10]);
                                spn_SelectionZ4[0].setValue(g4[11]);
                                break;
                            case 3:
                                BigDecimal[] g3 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g3[0]);
                                spn_SelectionY1[0].setValue(g3[1]);
                                spn_SelectionZ1[0].setValue(g3[2]);
                                spn_SelectionX2[0].setValue(g3[3]);
                                spn_SelectionY2[0].setValue(g3[4]);
                                spn_SelectionZ2[0].setValue(g3[5]);
                                spn_SelectionX3[0].setValue(g3[6]);
                                spn_SelectionY3[0].setValue(g3[7]);
                                spn_SelectionZ3[0].setValue(g3[8]);
                                break;
                            case 2:
                                BigDecimal[] g2 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g2[0]);
                                spn_SelectionY1[0].setValue(g2[1]);
                                spn_SelectionZ1[0].setValue(g2[2]);
                                spn_SelectionX2[0].setValue(g2[3]);
                                spn_SelectionY2[0].setValue(g2[4]);
                                spn_SelectionZ2[0].setValue(g2[5]);
                                break;
                            default:
                                disableSelectionTab();
                                updatingSelectionTab = true;
                                break;
                            }
                            // Show the current values in the spinner labels.
                            lbl_SelectionX1[0].setText(I18n.EDITOR3D_PositionX1 + " {" + spn_SelectionX1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY1[0].setText(I18n.EDITOR3D_PositionY1 + " {" + spn_SelectionY1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ1[0].setText(I18n.EDITOR3D_PositionZ1 + " {" + spn_SelectionZ1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX2[0].setText(I18n.EDITOR3D_PositionX2 + " {" + spn_SelectionX2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY2[0].setText(I18n.EDITOR3D_PositionY2 + " {" + spn_SelectionY2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ2[0].setText(I18n.EDITOR3D_PositionZ2 + " {" + spn_SelectionZ2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX3[0].setText(I18n.EDITOR3D_PositionX3 + " {" + spn_SelectionX3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY3[0].setText(I18n.EDITOR3D_PositionY3 + " {" + spn_SelectionY3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ3[0].setText(I18n.EDITOR3D_PositionZ3 + " {" + spn_SelectionZ3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX4[0].setText(I18n.EDITOR3D_PositionX4 + " {" + spn_SelectionX4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY4[0].setText(I18n.EDITOR3D_PositionY4 + " {" + spn_SelectionY4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ4[0].setText(I18n.EDITOR3D_PositionZ4 + " {" + spn_SelectionZ4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX1[0].getParent().layout();
                            updatingSelectionTab = false;
                            break;
                        default:
                            disableSelectionTab();
                            break;
                        }
                    } else {
                        disableSelectionTab();
                    }
                    if (breakIt) break;
                }
            } else {
                disableSelectionTab();
            }
        } else {
            disableSelectionTab();
        }
        updatingSelectionTab = false;
    }
});
// Steps the selection index forwards (with wrap-around) until it hits an
// element of line type 2-5 that is not inside a subfile, then fills the
// selection tab spinners with that element's coordinates. Mirror image of
// the btn_PreviousSelection handler.
btn_NextSelection[0].addSelectionListener(new SelectionAdapter() {
    @Override
    public void widgetSelected(SelectionEvent e) {
        // Suppress the spinner value-change listeners while the tab is
        // populated programmatically.
        updatingSelectionTab = true;
        NLogger.debug(getClass(), "Next Selection..."); //$NON-NLS-1$
        final DatFile df = Project.getFileToEdit();
        if (df != null) {
            final VertexManager vm = df.getVertexManager();
            final int count = vm.getSelectedData().size();
            if (count > 0) {
                boolean breakIt = false;
                boolean firstRun = true;
                while (true) {
                    int index = vm.getSelectedItemIndex();
                    index++;
                    if (index >= count) {
                        // Wrap to the first element; after the first wrap the
                        // whole selection has been visited once.
                        index = 0;
                        if (!firstRun) breakIt = true;
                    }
                    firstRun = false;
                    vm.setSelectedItemIndex(index);
                    final GData gdata = (GData) vm.getSelectedData().toArray()[index];
                    if (vm.isNotInSubfileAndLinetype2to5(gdata)) {
                        vm.setSelectedLine(gdata);
                        disableSelectionTab();
                        updatingSelectionTab = true;
                        // Intentional fall-through: type 5/4 enables the 4th
                        // vertex spinners, then 3 enables the 3rd, then 2
                        // enables the 1st and 2nd.
                        switch (gdata.type()) {
                        case 5:
                        case 4:
                            spn_SelectionX4[0].setEnabled(true);
                            spn_SelectionY4[0].setEnabled(true);
                            spn_SelectionZ4[0].setEnabled(true);
                        case 3:
                            spn_SelectionX3[0].setEnabled(true);
                            spn_SelectionY3[0].setEnabled(true);
                            spn_SelectionZ3[0].setEnabled(true);
                        case 2:
                            spn_SelectionX1[0].setEnabled(true);
                            spn_SelectionY1[0].setEnabled(true);
                            spn_SelectionZ1[0].setEnabled(true);
                            spn_SelectionX2[0].setEnabled(true);
                            spn_SelectionY2[0].setEnabled(true);
                            spn_SelectionZ2[0].setEnabled(true);
                            txt_Line[0].setText(gdata.toString());
                            breakIt = true;
                            // Fill the spinners with the element's precise
                            // coordinates (count depends on the line type).
                            switch (gdata.type()) {
                            case 5:
                                BigDecimal[] g5 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g5[0]);
                                spn_SelectionY1[0].setValue(g5[1]);
                                spn_SelectionZ1[0].setValue(g5[2]);
                                spn_SelectionX2[0].setValue(g5[3]);
                                spn_SelectionY2[0].setValue(g5[4]);
                                spn_SelectionZ2[0].setValue(g5[5]);
                                spn_SelectionX3[0].setValue(g5[6]);
                                spn_SelectionY3[0].setValue(g5[7]);
                                spn_SelectionZ3[0].setValue(g5[8]);
                                spn_SelectionX4[0].setValue(g5[9]);
                                spn_SelectionY4[0].setValue(g5[10]);
                                spn_SelectionZ4[0].setValue(g5[11]);
                                break;
                            case 4:
                                BigDecimal[] g4 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g4[0]);
                                spn_SelectionY1[0].setValue(g4[1]);
                                spn_SelectionZ1[0].setValue(g4[2]);
                                spn_SelectionX2[0].setValue(g4[3]);
                                spn_SelectionY2[0].setValue(g4[4]);
                                spn_SelectionZ2[0].setValue(g4[5]);
                                spn_SelectionX3[0].setValue(g4[6]);
                                spn_SelectionY3[0].setValue(g4[7]);
                                spn_SelectionZ3[0].setValue(g4[8]);
                                spn_SelectionX4[0].setValue(g4[9]);
                                spn_SelectionY4[0].setValue(g4[10]);
                                spn_SelectionZ4[0].setValue(g4[11]);
                                break;
                            case 3:
                                BigDecimal[] g3 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g3[0]);
                                spn_SelectionY1[0].setValue(g3[1]);
                                spn_SelectionZ1[0].setValue(g3[2]);
                                spn_SelectionX2[0].setValue(g3[3]);
                                spn_SelectionY2[0].setValue(g3[4]);
                                spn_SelectionZ2[0].setValue(g3[5]);
                                spn_SelectionX3[0].setValue(g3[6]);
                                spn_SelectionY3[0].setValue(g3[7]);
                                spn_SelectionZ3[0].setValue(g3[8]);
                                break;
                            case 2:
                                BigDecimal[] g2 = vm.getPreciseCoordinates(gdata);
                                spn_SelectionX1[0].setValue(g2[0]);
                                spn_SelectionY1[0].setValue(g2[1]);
                                spn_SelectionZ1[0].setValue(g2[2]);
                                spn_SelectionX2[0].setValue(g2[3]);
                                spn_SelectionY2[0].setValue(g2[4]);
                                spn_SelectionZ2[0].setValue(g2[5]);
                                break;
                            default:
                                disableSelectionTab();
                                updatingSelectionTab = true;
                                break;
                            }
                            // Show the current values in the spinner labels.
                            lbl_SelectionX1[0].setText(I18n.EDITOR3D_PositionX1 + " {" + spn_SelectionX1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY1[0].setText(I18n.EDITOR3D_PositionY1 + " {" + spn_SelectionY1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ1[0].setText(I18n.EDITOR3D_PositionZ1 + " {" + spn_SelectionZ1[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX2[0].setText(I18n.EDITOR3D_PositionX2 + " {" + spn_SelectionX2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY2[0].setText(I18n.EDITOR3D_PositionY2 + " {" + spn_SelectionY2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ2[0].setText(I18n.EDITOR3D_PositionZ2 + " {" + spn_SelectionZ2[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX3[0].setText(I18n.EDITOR3D_PositionX3 + " {" + spn_SelectionX3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY3[0].setText(I18n.EDITOR3D_PositionY3 + " {" + spn_SelectionY3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ3[0].setText(I18n.EDITOR3D_PositionZ3 + " {" + spn_SelectionZ3[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX4[0].setText(I18n.EDITOR3D_PositionX4 + " {" + spn_SelectionX4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionY4[0].setText(I18n.EDITOR3D_PositionY4 + " {" + spn_SelectionY4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionZ4[0].setText(I18n.EDITOR3D_PositionZ4 + " {" + spn_SelectionZ4[0].getStringValue() + "}"); //$NON-NLS-1$ //$NON-NLS-2$
                            lbl_SelectionX1[0].getParent().layout();
                            // NOTE(review): unlike the Previous handler, this
                            // branch does not clear updatingSelectionTab here;
                            // it relies on the reset at the end of the method.
                            // Verify the asymmetry is intentional.
                            break;
                        default:
                            disableSelectionTab();
                            break;
                        }
                    } else {
                        disableSelectionTab();
                    }
                    if (breakIt) break;
                }
            } else {
                disableSelectionTab();
            }
        } else {
            disableSelectionTab();
        }
        updatingSelectionTab = false;
    }
});
// Shared handler for the twelve selection-coordinate spinners: pushes the
// edited coordinates back into the currently selected line. The
// updatingSelectionTab flag prevents feedback loops while the spinners are
// being filled programmatically.
final ValueChangeAdapter va = new ValueChangeAdapter() {
    @Override
    public void valueChanged(BigDecimalSpinner spn) {
        if (updatingSelectionTab) return;
        // Adjacent data is moved along when the corresponding toggle is set.
        final GData newLine = Project.getFileToEdit().getVertexManager().updateSelectedLine(
            spn_SelectionX1[0].getValue(), spn_SelectionY1[0].getValue(), spn_SelectionZ1[0].getValue(),
            spn_SelectionX2[0].getValue(), spn_SelectionY2[0].getValue(), spn_SelectionZ2[0].getValue(),
            spn_SelectionX3[0].getValue(), spn_SelectionY3[0].getValue(), spn_SelectionZ3[0].getValue(),
            spn_SelectionX4[0].getValue(), spn_SelectionY4[0].getValue(), spn_SelectionZ4[0].getValue(),
            btn_MoveAdjacentData2[0].getSelection()
        );
        if (newLine == null) {
            // The line could not be updated — reset the selection tab.
            disableSelectionTab();
        } else {
            txt_Line[0].setText(newLine.toString());
        }
    }
};
// Wire the shared coordinate handler to all twelve selection spinners.
spn_SelectionX1[0].addValueChangeListener(va);
spn_SelectionY1[0].addValueChangeListener(va);
spn_SelectionZ1[0].addValueChangeListener(va);
spn_SelectionX2[0].addValueChangeListener(va);
spn_SelectionY2[0].addValueChangeListener(va);
spn_SelectionZ2[0].addValueChangeListener(va);
spn_SelectionX3[0].addValueChangeListener(va);
spn_SelectionY3[0].addValueChangeListener(va);
spn_SelectionZ3[0].addValueChangeListener(va);
spn_SelectionX4[0].addValueChangeListener(va);
spn_SelectionY4[0].addValueChangeListener(va);
spn_SelectionZ4[0].addValueChangeListener(va);
// treeParts[0].addSelectionListener(new SelectionAdapter() {
// @Override
// public void widgetSelected(final SelectionEvent e) {
//
// }
// });
treeParts[0].addListener(SWT.MouseDown, new Listener() {
@Override
public void handleEvent(Event event) {
if (event.button == MouseButton.RIGHT) {
NLogger.debug(getClass(), "Showing context menu."); //$NON-NLS-1$
try {
if (treeParts[0].getTree().getMenu() != null) {
treeParts[0].getTree().getMenu().dispose();
}
} catch (Exception ex) {}
Menu treeMenu = new Menu(treeParts[0].getTree());
treeParts[0].getTree().setMenu(treeMenu);
mnu_treeMenu[0] = treeMenu;
MenuItem mntmOpenIn3DEditor = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_OpenIn3DEditor[0] = mntmOpenIn3DEditor;
mntmOpenIn3DEditor.setEnabled(true);
mntmOpenIn3DEditor.setText("Open In 3D Editor"); //$NON-NLS-1$ I18N Needs translation!
MenuItem mntmOpenInTextEditor = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_OpenInTextEditor[0] = mntmOpenInTextEditor;
mntmOpenInTextEditor.setEnabled(true);
mntmOpenInTextEditor.setText("Open In Text Editor"); //$NON-NLS-1$ I18N Needs translation!
@SuppressWarnings("unused")
MenuItem mntm_Separator = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT() | SWT.SEPARATOR);
MenuItem mntmRename = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_Rename[0] = mntmRename;
mntmRename.setEnabled(true);
mntmRename.setText("Rename / Move"); //$NON-NLS-1$ I18N Needs translation!
MenuItem mntmRevert = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_Revert[0] = mntmRevert;
mntmRevert.setEnabled(true);
mntmRevert.setText("Revert All Changes"); //$NON-NLS-1$ I18N Needs translation!
MenuItem mntmDelete = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_Delete[0] = mntmDelete;
mntmDelete.setEnabled(true);
mntmDelete.setText("Delete"); //$NON-NLS-1$ I18N Needs translation!
@SuppressWarnings("unused")
MenuItem mntm_Separator2 = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT() | SWT.SEPARATOR);
MenuItem mntmCopyToUnofficial = new MenuItem(treeMenu, I18n.I18N_NON_BIDIRECT());
mntm_CopyToUnofficial[0] = mntmCopyToUnofficial;
mntmCopyToUnofficial.setEnabled(true);
mntmCopyToUnofficial.setText("Copy To Unofficial Library"); //$NON-NLS-1$ I18N Needs translation!
mntm_OpenInTextEditor[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
if (df.equals(((CompositeTab) t).getState().getFileNameObj())) {
w.getTabFolder().setSelection(t);
((CompositeTab) t).getControl().getShell().forceActive();
w.open();
df.getVertexManager().setUpdated(true);
return;
}
}
}
// Project.getParsedFiles().add(df); IS NECESSARY HERE
Project.getParsedFiles().add(df);
new EditorTextWindow().run(df);
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
cleanupClosedData();
}
});
mntm_OpenIn3DEditor[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
if (renders.isEmpty()) {
if ("%EMPTY%".equals(Editor3DWindow.getSashForm().getChildren()[1].getData())) { //$NON-NLS-1$
int[] mainSashWeights = Editor3DWindow.getSashForm().getWeights();
Editor3DWindow.getSashForm().getChildren()[1].dispose();
CompositeContainer cmp_Container = new CompositeContainer(Editor3DWindow.getSashForm(), false);
cmp_Container.moveBelow(Editor3DWindow.getSashForm().getChildren()[0]);
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
df.parseForData();
Project.setFileToEdit(df);
cmp_Container.getComposite3D().setLockableDatFileReference(df);
Editor3DWindow.getSashForm().getParent().layout();
Editor3DWindow.getSashForm().setWeights(mainSashWeights);
}
} else {
boolean canUpdate = false;
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (!c3d.isDatFileLockedOnDisplay()) {
canUpdate = true;
break;
}
}
if (canUpdate) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
final VertexManager vm = df.getVertexManager();
if (vm.isModified()) {
df.setText(df.getText());
}
df.parseForData();
Project.setFileToEdit(df);
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (!c3d.isDatFileLockedOnDisplay()) {
c3d.setLockableDatFileReference(df);
vm.zoomToFit(c3d);
}
}
}
}
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
cleanupClosedData();
}
});
mntm_Revert[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
if (df.isReadOnly() || !Project.getUnsavedFiles().contains(df) || df.isVirtual() && df.getText().trim().isEmpty()) return;
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.NO);
messageBox.setText(I18n.DIALOG_RevertTitle);
Object[] messageArguments = {df.getShortName(), df.getLastSavedOpened()};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_Revert);
messageBox.setMessage(formatter.format(messageArguments));
int result = messageBox.open();
if (result == SWT.NO) {
return;
}
boolean canUpdate = false;
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(df)) {
canUpdate = true;
break;
}
}
EditorTextWindow tmpW = null;
CTabItem tmpT = null;
for (EditorTextWindow w : Project.getOpenTextWindows()) {
for (CTabItem t : w.getTabFolder().getItems()) {
if (df.equals(((CompositeTab) t).getState().getFileNameObj())) {
canUpdate = true;
tmpW = w;
tmpT = t;
break;
}
}
}
df.setText(df.getOriginalText());
df.setOldName(df.getNewName());
if (!df.isVirtual()) {
Project.removeUnsavedFile(df);
updateTree_unsavedEntries();
}
if (canUpdate) {
df.parseForData();
df.getVertexManager().setModified(true);
if (tmpW != null) {
tmpW.getTabFolder().setSelection(tmpT);
((CompositeTab) tmpT).getControl().getShell().forceActive();
tmpW.open();
((CompositeTab) tmpT).getTextComposite().forceFocus();
}
}
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
}
});
mntm_Delete[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
if (df.isReadOnly()) {
if (treeParts[0].getSelection()[0].getParentItem().getParentItem() == treeItem_Project[0]) {
updateTree_removeEntry(df);
cleanupClosedData();
}
return;
}
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.YES | SWT.NO);
messageBox.setText(I18n.DIALOG_DeleteTitle);
Object[] messageArguments = {df.getShortName()};
MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
formatter.setLocale(View.LOCALE);
formatter.applyPattern(I18n.DIALOG_Delete);
messageBox.setMessage(formatter.format(messageArguments));
int result = messageBox.open();
if (result == SWT.NO) {
return;
}
updateTree_removeEntry(df);
try {
File f = new File(df.getOldName());
if (f.exists()) {
f.delete();
}
} catch (Exception ex) {}
cleanupClosedData();
} else {
MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
messageBoxError.setMessage(I18n.DIALOG_Unavailable);
messageBoxError.open();
}
}
});
                // Context-menu "Rename/Move": renames or moves the selected DatFile via a
                // SWT.SAVE FileDialog, or moves the whole project when the project root
                // item is selected. Anything else shows an "unavailable" info box.
                mntm_Rename[0].addSelectionListener(new SelectionAdapter() {
                    @SuppressWarnings("unchecked")
                    @Override
                    public void widgetSelected(SelectionEvent e) {
                        if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
                            DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
                            // Read-only files cannot be renamed or moved.
                            if (df.isReadOnly()) return;
                            FileDialog dlg = new FileDialog(Editor3DWindow.getWindow().getShell(), SWT.SAVE);
                            File tmp = new File(df.getNewName());
                            // Pre-select the file's current folder and name in the dialog.
                            dlg.setFilterPath(tmp.getAbsolutePath().substring(0, tmp.getAbsolutePath().length() - tmp.getName().length()));
                            dlg.setFileName(tmp.getName());
                            dlg.setFilterExtensions(new String[]{"*.dat"}); //$NON-NLS-1$
                            dlg.setOverwrite(true);
                            // Change the title bar text
                            dlg.setText(I18n.DIALOG_RenameOrMove);
                            // Calling open() will open and run the dialog.
                            // It will return the selected file, or
                            // null if user cancels
                            String newPath = dlg.open();
                            if (newPath != null) {
                                // Keep re-prompting while the chosen name is already allocated.
                                while (isFileNameAllocated(newPath, df, false)) {
                                    MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.RETRY | SWT.CANCEL);
                                    messageBox.setText(I18n.DIALOG_AlreadyAllocatedNameTitle);
                                    messageBox.setMessage(I18n.DIALOG_AlreadyAllocatedName);
                                    int result = messageBox.open();
                                    if (result == SWT.CANCEL) {
                                        return;
                                    }
                                    newPath = dlg.open();
                                    if (newPath == null) return;
                                }
                                // Ask for confirmation when a project file would leave the project folder.
                                if (df.isProjectFile() && !newPath.startsWith(Project.getProjectPath())) {
                                    MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.YES | SWT.NO);
                                    messageBox.setText(I18n.DIALOG_NoProjectLocationTitle);
                                    Object[] messageArguments = {new File(newPath).getName()};
                                    MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
                                    formatter.setLocale(View.LOCALE);
                                    formatter.applyPattern(I18n.DIALOG_NoProjectLocation);
                                    messageBox.setMessage(formatter.format(messageArguments));
                                    int result = messageBox.open();
                                    if (result == SWT.NO) {
                                        return;
                                    }
                                }
                                df.setNewName(newPath);
                                // A changed name marks the file as unsaved; an unchanged name on
                                // an unmodified file removes it from the unsaved set again.
                                if (!df.getOldName().equals(df.getNewName())) {
                                    if (!Project.getUnsavedFiles().contains(df)) {
                                        df.parseForData();
                                        df.getVertexManager().setModified(true);
                                        Project.getUnsavedFiles().add(df);
                                    }
                                } else {
                                    if (df.getText().equals(df.getOriginalText()) && df.getOldName().equals(df.getNewName())) {
                                        Project.removeUnsavedFile(df);
                                    }
                                }
                                df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
                                // Refresh all open text editor tabs that show this file.
                                HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows());
                                for (EditorTextWindow win : windows) {
                                    win.updateTabWithDatfile(df);
                                }
                                updateTree_renamedEntries();
                                updateTree_unsavedEntries();
                            }
                        } else if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].equals(treeItem_Project[0])) {
                            // Project root selected: "rename" means picking a new project
                            // name/location through the NewProjectDialog.
                            if (Project.isDefaultProject()) {
                                ProjectActions.createNewProject(Editor3DWindow.getWindow(), true);
                            } else {
                                int result = new NewProjectDialog(true).open();
                                if (result == IDialogConstants.OK_ID && !Project.getTempProjectPath().equals(Project.getProjectPath())) {
                                    try {
                                        // Target folder exists: YES = overwrite, NO = choose
                                        // another location, CANCEL = abort.
                                        while (new File(Project.getTempProjectPath()).isDirectory()) {
                                            MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.YES | SWT.CANCEL | SWT.NO);
                                            messageBoxError.setText(I18n.PROJECT_ProjectOverwriteTitle);
                                            messageBoxError.setMessage(I18n.PROJECT_ProjectOverwrite);
                                            int result2 = messageBoxError.open();
                                            if (result2 == SWT.CANCEL) {
                                                return;
                                            } else if (result2 == SWT.YES) {
                                                break;
                                            } else {
                                                result = new NewProjectDialog(true).open();
                                                if (result == IDialogConstants.CANCEL_ID) {
                                                    return;
                                                }
                                            }
                                        }
                                        Project.copyFolder(new File(Project.getProjectPath()), new File(Project.getTempProjectPath()));
                                        Project.deleteFolder(new File(Project.getProjectPath()));
                                        // Linked project parts need a new path, because they were copied to a new directory
                                        String defaultPrefix = new File(Project.getProjectPath()).getAbsolutePath() + File.separator;
                                        String projectPrefix = new File(Project.getTempProjectPath()).getAbsolutePath() + File.separator;
                                        Editor3DWindow.getWindow().getProjectParts().getParentItem().setData(Project.getTempProjectPath());
                                        HashSet<DatFile> projectFiles = new HashSet<DatFile>();
                                        projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectParts().getData());
                                        projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectSubparts().getData());
                                        projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectPrimitives().getData());
                                        projectFiles.addAll((ArrayList<DatFile>) Editor3DWindow.getWindow().getProjectPrimitives48().getData());
                                        for (DatFile df : projectFiles) {
                                            // Temporarily deregister the file, rewrite its
                                            // old/new path prefixes to the new project folder,
                                            // then restore its unsaved/parsed state.
                                            boolean isUnsaved = Project.getUnsavedFiles().contains(df);
                                            boolean isParsed = Project.getParsedFiles().contains(df);
                                            Project.getParsedFiles().remove(df);
                                            Project.getUnsavedFiles().remove(df);
                                            String newName = df.getNewName();
                                            String oldName = df.getOldName();
                                            df.updateLastModified();
                                            if (!newName.startsWith(projectPrefix) && newName.startsWith(defaultPrefix)) {
                                                df.setNewName(projectPrefix + newName.substring(defaultPrefix.length()));
                                            }
                                            if (!oldName.startsWith(projectPrefix) && oldName.startsWith(defaultPrefix)) {
                                                df.setOldName(projectPrefix + oldName.substring(defaultPrefix.length()));
                                            }
                                            df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
                                            if (isUnsaved) Project.addUnsavedFile(df);
                                            if (isParsed) Project.getParsedFiles().add(df);
                                        }
                                        Project.setProjectName(Project.getTempProjectName());
                                        Project.setProjectPath(Project.getTempProjectPath());
                                        Editor3DWindow.getWindow().getProjectParts().getParentItem().setText(Project.getProjectName());
                                        updateTree_unsavedEntries();
                                        Project.updateEditor();
                                        Editor3DWindow.getWindow().getShell().update();
                                    } catch (IOException e1) {
                                        // TODO Auto-generated catch block
                                        e1.printStackTrace();
                                    }
                                }
                            }
                        } else {
                            MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
                            messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
                            messageBoxError.setMessage(I18n.DIALOG_Unavailable);
                            messageBoxError.open();
                        }
                    }
                });
                // Context-menu "Copy to unofficial library": copies the selected DatFile
                // (and optionally its required/related references) into the matching
                // subfolder of the unofficial library, replacing an existing file on demand.
                mntm_CopyToUnofficial[0] .addSelectionListener(new SelectionAdapter() {
                    @SuppressWarnings("unchecked")
                    @Override
                    public void widgetSelected(SelectionEvent e) {
                        if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null && treeParts[0].getSelection()[0].getData() instanceof DatFile) {
                            DatFile df = (DatFile) treeParts[0].getSelection()[0].getData();
                            TreeItem p = treeParts[0].getSelection()[0].getParentItem();
                            // Upper- and lower-case path variants are tracked because the
                            // library folders may exist as e.g. "PARTS" or "parts" on disk.
                            String targetPath_u;
                            String targetPath_l;
                            String targetPathDir_u;
                            String targetPathDir_l;
                            TreeItem targetTreeItem;
                            boolean projectIsFileOrigin = false;
                            // Map the source tree folder to the unofficial target folder.
                            if (treeItem_ProjectParts[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"; //$NON-NLS-1$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"; //$NON-NLS-1$
                                targetTreeItem = treeItem_UnofficialParts[0];
                                projectIsFileOrigin = true;
                            } else if (treeItem_ProjectPrimitives[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P"; //$NON-NLS-1$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p"; //$NON-NLS-1$
                                targetTreeItem = treeItem_UnofficialPrimitives[0];
                                projectIsFileOrigin = true;
                            } else if (treeItem_ProjectPrimitives48[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetTreeItem = treeItem_UnofficialPrimitives48[0];
                                projectIsFileOrigin = true;
                            } else if (treeItem_ProjectSubparts[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"+ File.separator + "S"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"+ File.separator + "s"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetTreeItem = treeItem_UnofficialSubparts[0];
                                projectIsFileOrigin = true;
                            } else if (treeItem_OfficialParts[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"; //$NON-NLS-1$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"; //$NON-NLS-1$
                                targetTreeItem = treeItem_UnofficialParts[0];
                            } else if (treeItem_OfficialPrimitives[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P"; //$NON-NLS-1$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p"; //$NON-NLS-1$
                                targetTreeItem = treeItem_UnofficialPrimitives[0];
                            } else if (treeItem_OfficialPrimitives48[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "P" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "p" + File.separator + "48"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetTreeItem = treeItem_UnofficialPrimitives48[0];
                            } else if (treeItem_OfficialSubparts[0].equals(p)) {
                                targetPath_u = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "PARTS"+ File.separator + "S"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetPath_l = WorkbenchManager.getUserSettingState().getUnofficialFolderPath() + File.separator + "parts"+ File.separator + "s"; //$NON-NLS-1$ //$NON-NLS-2$
                                targetTreeItem = treeItem_UnofficialSubparts[0];
                            } else {
                                // The file does not live in a copyable folder.
                                MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
                                messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
                                messageBoxError.setMessage(I18n.DIALOG_Unavailable);
                                messageBoxError.open();
                                return;
                            }
                            targetPathDir_l = targetPath_l;
                            targetPathDir_u = targetPath_u;
                            final String newName = new File(df.getNewName()).getName();
                            targetPath_u = targetPath_u + File.separator + newName;
                            targetPath_l = targetPath_l + File.separator + newName;
                            // Look for an already-loaded DatFile at either path variant.
                            DatFile fileToOverwrite_u = new DatFile(targetPath_u);
                            DatFile fileToOverwrite_l = new DatFile(targetPath_l);
                            DatFile targetFile = null;
                            TreeItem[] folders = new TreeItem[4];
                            folders[0] = treeItem_UnofficialParts[0];
                            folders[1] = treeItem_UnofficialPrimitives[0];
                            folders[2] = treeItem_UnofficialPrimitives48[0];
                            folders[3] = treeItem_UnofficialSubparts[0];
                            for (TreeItem folder : folders) {
                                ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
                                for (DatFile d : cachedReferences) {
                                    if (fileToOverwrite_u.equals(d) || fileToOverwrite_l.equals(d)) {
                                        targetFile = d;
                                        break;
                                    }
                                }
                            }
                            // Confirm replacing an existing target (on disk or in memory).
                            if (new File(targetPath_u).exists() || new File(targetPath_l).exists() || targetFile != null) {
                                MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.OK | SWT.CANCEL);
                                messageBox.setText(I18n.DIALOG_ReplaceTitle);
                                Object[] messageArguments = {newName};
                                MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
                                formatter.setLocale(View.LOCALE);
                                formatter.applyPattern(I18n.DIALOG_Replace);
                                messageBox.setMessage(formatter.format(messageArguments));
                                int result = messageBox.open();
                                if (result == SWT.CANCEL) {
                                    return;
                                }
                            }
                            ArrayList<ArrayList<DatFile>> refResult = null;
                            if (new File(targetPathDir_l).exists() || new File(targetPathDir_u).exists()) {
                                if (targetFile == null) {
                                    // CopyDialog: OK = file only, YES = file + required
                                    // references, NO = file + required and related references.
                                    int result = new CopyDialog(getShell(), new File(df.getNewName()).getName()).open();
                                    switch (result) {
                                    case IDialogConstants.OK_ID:
                                        // Copy File Only
                                        break;
                                    case IDialogConstants.NO_ID:
                                        // Copy File and required and related
                                        if (projectIsFileOrigin) {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
                                        } else {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
                                        }
                                        break;
                                    case IDialogConstants.YES_ID:
                                        // Copy File and required
                                        if (projectIsFileOrigin) {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
                                        } else {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
                                        }
                                        break;
                                    default:
                                        return;
                                    }
                                    DatFile newDatFile = new DatFile(new File(targetPathDir_l).exists() ? targetPath_l : targetPath_u);
                                    // Text exchange includes description exchange
                                    newDatFile.setText(df.getText());
                                    newDatFile.saveForced();
                                    newDatFile.setType(df.getType());
                                    ((ArrayList<DatFile>) targetTreeItem.getData()).add(newDatFile);
                                    TreeItem ti = new TreeItem(targetTreeItem, SWT.NONE);
                                    ti.setText(new File(df.getNewName()).getName());
                                    ti.setData(newDatFile);
                                } else if (targetFile.equals(df)) { // This can only happen if the user opens the unofficial parts folder as a project
                                    MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
                                    messageBox.setText(I18n.DIALOG_AlreadyAllocatedNameTitle);
                                    messageBox.setMessage(I18n.DIALOG_AlreadyAllocatedName);
                                    messageBox.open();
                                    return;
                                } else {
                                    // Same dialog as above, but the existing target file is
                                    // disposed and removed from the tree before the copy.
                                    int result = new CopyDialog(getShell(), new File(df.getNewName()).getName()).open();
                                    switch (result) {
                                    case IDialogConstants.OK_ID:
                                        // Copy File Only
                                        break;
                                    case IDialogConstants.NO_ID:
                                        // Copy File and required and related
                                        if (projectIsFileOrigin) {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
                                        } else {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED_AND_RELATED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
                                        }
                                        break;
                                    case IDialogConstants.YES_ID:
                                        // Copy File and required
                                        if (projectIsFileOrigin) {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Project[0], treeItem_Unofficial[0], treeItem_Official[0]);
                                        } else {
                                            refResult = ReferenceParser.checkForReferences(df, References.REQUIRED, treeItem_Official[0], treeItem_Unofficial[0], treeItem_Project[0]);
                                        }
                                        break;
                                    default:
                                        return;
                                    }
                                    targetFile.disposeData();
                                    updateTree_removeEntry(targetFile);
                                    DatFile newDatFile = new DatFile(new File(targetPathDir_l).exists() ? targetPath_l : targetPath_u);
                                    newDatFile.setText(df.getText());
                                    newDatFile.saveForced();
                                    ((ArrayList<DatFile>) targetTreeItem.getData()).add(newDatFile);
                                    TreeItem ti = new TreeItem(targetTreeItem, SWT.NONE);
                                    ti.setText(new File(df.getNewName()).getName());
                                    ti.setData(newDatFile);
                                }
                                if (refResult != null) {
                                    // refResult layout: indices 0..3 hold files to remove,
                                    // indices 4..7 hold files to create, per target folder.
                                    // Remove old data
                                    for(int i = 0; i < 4; i++) {
                                        ArrayList<DatFile> toRemove = refResult.get(i);
                                        for (DatFile datToRemove : toRemove) {
                                            datToRemove.disposeData();
                                            updateTree_removeEntry(datToRemove);
                                        }
                                    }
                                    // Create new data
                                    TreeItem[] targetTrees = new TreeItem[]{treeItem_UnofficialParts[0], treeItem_UnofficialSubparts[0], treeItem_UnofficialPrimitives[0], treeItem_UnofficialPrimitives48[0]};
                                    for(int i = 4; i < 8; i++) {
                                        ArrayList<DatFile> toCreate = refResult.get(i);
                                        for (DatFile datToCreate : toCreate) {
                                            DatFile newDatFile = new DatFile(datToCreate.getOldName());
                                            String source = datToCreate.getTextDirect();
                                            newDatFile.setText(source);
                                            newDatFile.setOriginalText(source);
                                            newDatFile.saveForced();
                                            newDatFile.setType(datToCreate.getType());
                                            ((ArrayList<DatFile>) targetTrees[i - 4].getData()).add(newDatFile);
                                            TreeItem ti = new TreeItem(targetTrees[i - 4], SWT.NONE);
                                            ti.setText(new File(datToCreate.getOldName()).getName());
                                            ti.setData(newDatFile);
                                        }
                                    }
                                }
                                updateTree_unsavedEntries();
                            }
                        } else {
                            MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_INFORMATION | SWT.OK);
                            messageBoxError.setText(I18n.DIALOG_UnavailableTitle);
                            messageBoxError.setMessage(I18n.DIALOG_Unavailable);
                            messageBoxError.open();
                        }
                    }
                });
java.awt.Point b = java.awt.MouseInfo.getPointerInfo().getLocation();
final int x = (int) b.getX();
final int y = (int) b.getY();
Menu menu = mnu_treeMenu[0];
menu.setLocation(x, y);
menu.setVisible(true);
}
}
});
treeParts[0].addListener(SWT.MouseDoubleClick, new Listener() {
@Override
public void handleEvent(Event event) {
if (treeParts[0].getSelectionCount() == 1 && treeParts[0].getSelection()[0] != null) {
treeParts[0].getSelection()[0].setVisible(!treeParts[0].getSelection()[0].isVisible());
TreeItem sel = treeParts[0].getSelection()[0];
sh.getDisplay().asyncExec(new Runnable() {
@Override
public void run() {
treeParts[0].build();
}
});
treeParts[0].redraw();
treeParts[0].update();
treeParts[0].getTree().select(treeParts[0].getMapInv().get(sel));
}
}
});
txt_Search[0].addModifyListener(new ModifyListener() {
@Override
public void modifyText(ModifyEvent e) {
search(txt_Search[0].getText());
}
});
btn_ResetSearch[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
txt_Search[0].setText(""); //$NON-NLS-1$
}
});
btn_Hide[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().hideSelection();
}
});
btn_ShowAll[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().showAll();
}
});
btn_NoTransparentSelection[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
setNoTransparentSelection(btn_NoTransparentSelection[0].getSelection());
}
});
btn_BFCToggle[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
setBfcToggle(btn_BFCToggle[0].getSelection());
}
});
btn_Delete[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().delete(Editor3DWindow.getWindow().isMovingAdjacentData(), true);
}
});
btn_Copy[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().copy();
}
});
btn_Cut[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
Project.getFileToEdit().getVertexManager().copy();
Project.getFileToEdit().getVertexManager().delete(false, true);
}
}
});
btn_Paste[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) Project.getFileToEdit().getVertexManager().paste();
}
});
btn_Manipulator_0_toOrigin[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
c3d.getManipulator().reset();
}
}
}
}
});
btn_Manipulator_XIII_toWorld[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f t = new Vector4f(c3d.getManipulator().getPosition());
BigDecimal[] T = c3d.getManipulator().getAccuratePosition();
c3d.getManipulator().reset();
c3d.getManipulator().getPosition().set(t);
c3d.getManipulator().setAccuratePosition(T[0], T[1], T[2]);
;
}
}
}
}
});
btn_Manipulator_X_XReverse[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f.sub(new Vector4f(0f, 0f, 0f, 2f), c3d.getManipulator().getXaxis(), c3d.getManipulator().getXaxis());
BigDecimal[] a = c3d.getManipulator().getAccurateXaxis();
c3d.getManipulator().setAccurateXaxis(a[0].negate(), a[1].negate(), a[2].negate());
}
}
}
});
btn_Manipulator_XI_YReverse[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f.sub(new Vector4f(0f, 0f, 0f, 2f), c3d.getManipulator().getYaxis(), c3d.getManipulator().getYaxis());
BigDecimal[] a = c3d.getManipulator().getAccurateYaxis();
c3d.getManipulator().setAccurateYaxis(a[0].negate(), a[1].negate(), a[2].negate());
}
}
}
});
btn_Manipulator_XII_ZReverse[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f.sub(new Vector4f(0f, 0f, 0f, 2f), c3d.getManipulator().getZaxis(), c3d.getManipulator().getZaxis());
BigDecimal[] a = c3d.getManipulator().getAccurateZaxis();
c3d.getManipulator().setAccurateZaxis(a[0].negate(), a[1].negate(), a[2].negate());
}
}
}
});
btn_Manipulator_SwitchXY[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f temp = new Vector4f(c3d.getManipulator().getXaxis());
c3d.getManipulator().getXaxis().set(c3d.getManipulator().getYaxis());
c3d.getManipulator().getYaxis().set(temp);
BigDecimal[] a = c3d.getManipulator().getAccurateXaxis().clone();
BigDecimal[] b = c3d.getManipulator().getAccurateYaxis().clone();
c3d.getManipulator().setAccurateXaxis(b[0], b[1], b[2]);
c3d.getManipulator().setAccurateYaxis(a[0], a[1], a[2]);
}
}
}
});
btn_Manipulator_SwitchXZ[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f temp = new Vector4f(c3d.getManipulator().getXaxis());
c3d.getManipulator().getXaxis().set(c3d.getManipulator().getZaxis());
c3d.getManipulator().getZaxis().set(temp);
BigDecimal[] a = c3d.getManipulator().getAccurateXaxis().clone();
BigDecimal[] b = c3d.getManipulator().getAccurateZaxis().clone();
c3d.getManipulator().setAccurateXaxis(b[0], b[1], b[2]);
c3d.getManipulator().setAccurateZaxis(a[0], a[1], a[2]);
}
}
}
});
btn_Manipulator_SwitchYZ[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f temp = new Vector4f(c3d.getManipulator().getZaxis());
c3d.getManipulator().getZaxis().set(c3d.getManipulator().getYaxis());
c3d.getManipulator().getYaxis().set(temp);
BigDecimal[] a = c3d.getManipulator().getAccurateYaxis().clone();
BigDecimal[] b = c3d.getManipulator().getAccurateZaxis().clone();
c3d.getManipulator().setAccurateYaxis(b[0], b[1], b[2]);
c3d.getManipulator().setAccurateZaxis(a[0], a[1], a[2]);
}
}
}
});
        // "Camera to manipulator": orient the 3D view to the manipulator axes and
        // translate it to the manipulator position.
        btn_Manipulator_1_cameraToPos[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    Vector4f pos = c3d.getManipulator().getPosition();
                    Vector4f a1 = c3d.getManipulator().getXaxis();
                    Vector4f a2 = c3d.getManipulator().getYaxis();
                    Vector4f a3 = c3d.getManipulator().getZaxis();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        Matrix4f rot = new Matrix4f();
                        Matrix4f.setIdentity(rot);
                        // Write the manipulator axes into the rotation matrix as its
                        // basis vectors (a1 = X, a2 = Y, a3 = Z).
                        rot.m00 = a1.x;
                        rot.m10 = a1.y;
                        rot.m20 = a1.z;
                        rot.m01 = a2.x;
                        rot.m11 = a2.y;
                        rot.m21 = a2.z;
                        rot.m02 = a3.x;
                        rot.m12 = a3.y;
                        rot.m22 = a3.z;
                        c3d.getRotation().load(rot);
                        Matrix4f trans = new Matrix4f();
                        Matrix4f.setIdentity(trans);
                        // Shift the view so the manipulator position becomes the view origin.
                        trans.translate(new Vector3f(-pos.x, -pos.y, -pos.z));
                        c3d.getTranslation().load(trans);
                        c3d.getPerspectiveCalculator().calculateOriginData();
                    }
                }
            }
        });
btn_Manipulator_2_toAverage[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
if (Project.getFileToEdit() != null) {
Vector4f avg = Project.getFileToEdit().getVertexManager().getSelectionCenter();
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
c3d.getManipulator().getPosition().set(avg.x, avg.y, avg.z, 1f);
c3d.getManipulator().setAccuratePosition(new BigDecimal(avg.x / 1000f), new BigDecimal(avg.y / 1000f), new BigDecimal(avg.z / 1000f));
}
}
}
}
});
        // Align the manipulator with a selected subfile reference: the subfile's
        // product matrix supplies the position and the (normalised) axes.
        btn_Manipulator_3_toSubfile[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (Project.getFileToEdit() != null) {
                    Set<GData1> subfiles = Project.getFileToEdit().getVertexManager().getSelectedSubfiles();
                    if (!subfiles.isEmpty()) {
                        // Take an arbitrary element of the selection set (iteration order
                        // of the set decides which one).
                        GData1 subfile = null;
                        for (GData1 g1 : subfiles) {
                            subfile = g1;
                            break;
                        }
                        Matrix4f m = subfile.getProductMatrix();
                        Matrix M = subfile.getAccurateProductMatrix();
                        for (OpenGLRenderer renderer : renders) {
                            Composite3D c3d = renderer.getC3D();
                            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                                // Position from the matrix translation (float and accurate).
                                c3d.getManipulator().getPosition().set(m.m30, m.m31, m.m32, 1f);
                                c3d.getManipulator().setAccuratePosition(M.M30, M.M31, M.M32);
                                // Axes from the (normalised) matrix rows.
                                Vector3f x = new Vector3f(m.m00, m.m01, m.m02);
                                x.normalise();
                                Vector3f y = new Vector3f(m.m10, m.m11, m.m12);
                                y.normalise();
                                Vector3f z = new Vector3f(m.m20, m.m21, m.m22);
                                z.normalise();
                                c3d.getManipulator().getXaxis().set(x.x, x.y, x.z, 1f);
                                c3d.getManipulator().getYaxis().set(y.x, y.y, y.z, 1f);
                                c3d.getManipulator().getZaxis().set(z.x, z.y, z.z, 1f);
                                // NOTE(review): the "accurate" axes are derived from the float
                                // axes via BigDecimal(double), so they carry binary-float
                                // noise — presumably accepted here; confirm.
                                c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
                                        new BigDecimal(c3d.getManipulator().getXaxis().z));
                                c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
                                        new BigDecimal(c3d.getManipulator().getYaxis().z));
                                c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
                                        new BigDecimal(c3d.getManipulator().getZaxis().z));
                            }
                        }
                    }
                }
            }
        });
        // Move a selected subfile to the manipulator: applies the manipulator's
        // accurate matrix to the subfile reference.
        btn_Manipulator_32_subfileTo[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (Project.getFileToEdit() != null) {
                    VertexManager vm = Project.getFileToEdit().getVertexManager();
                    Set<GData1> subfiles = vm.getSelectedSubfiles();
                    if (!subfiles.isEmpty()) {
                        // Pick the first selected subfile which still has an entry in the
                        // line->vertices map; others are skipped.
                        GData1 subfile = null;
                        for (GData1 g1 : subfiles) {
                            if (vm.getLineLinkedToVertices().containsKey(g1)) {
                                subfile = g1;
                                break;
                            }
                        }
                        if (subfile == null) {
                            return;
                        }
                        for (OpenGLRenderer renderer : renders) {
                            Composite3D c3d = renderer.getC3D();
                            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                                Manipulator ma = c3d.getManipulator();
                                vm.transformSubfile(subfile, ma.getAccurateMatrix(), true, true);
                            }
                        }
                    }
                }
            }
        });
        // Snap the manipulator to the nearest vertex (searching only the selected
        // vertices when a selection exists, otherwise all vertices of the file).
        btn_Manipulator_4_toVertex[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        float minDist = Float.MAX_VALUE;
                        Vector4f next = new Vector4f(c3d.getManipulator().getPosition());
                        Vector4f min = new Vector4f(c3d.getManipulator().getPosition());
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        Set<Vertex> vertices;
                        if (vm.getSelectedVertices().isEmpty()) {
                            vertices = vm.getVertices();
                        } else {
                            vertices = vm.getSelectedVertices();
                        }
                        // NOTE(review): if the vertex set is empty, the loop never runs and
                        // the accurate position is set to (0,0,0) while the float position
                        // keeps its old value — confirm whether that is intended.
                        Vertex minVertex = new Vertex(0f, 0f, 0f);
                        for (Vertex vertex : vertices) {
                            // Squared distance is enough for comparison; no sqrt needed.
                            Vector4f sub = Vector4f.sub(next, vertex.toVector4f(), null);
                            float d2 = sub.lengthSquared();
                            if (d2 < minDist) {
                                minVertex = vertex;
                                minDist = d2;
                                min = vertex.toVector4f();
                            }
                        }
                        c3d.getManipulator().getPosition().set(min.x, min.y, min.z, 1f);
                        c3d.getManipulator().setAccuratePosition(minVertex.X, minVertex.Y, minVertex.Z);
                    }
                }
            }
        });
btn_Manipulator_5_toEdge[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f min = new Vector4f(c3d.getManipulator().getPosition());
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
min = vm.getMinimalDistanceVertexToLines(new Vertex(c3d.getManipulator().getPosition())).toVector4f();
c3d.getManipulator().getPosition().set(min.x, min.y, min.z, 1f);
c3d.getManipulator().setAccuratePosition(new BigDecimal(min.x / 1000f), new BigDecimal(min.y / 1000f), new BigDecimal(min.z / 1000f));
}
}
}
});
btn_Manipulator_6_toSurface[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
Vector4f min = new Vector4f(c3d.getManipulator().getPosition());
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
min = vm.getMinimalDistanceVertexToSurfaces(new Vertex(c3d.getManipulator().getPosition())).toVector4f();
c3d.getManipulator().getPosition().set(min.x, min.y, min.z, 1f);
c3d.getManipulator().setAccuratePosition(new BigDecimal(min.x / 1000f), new BigDecimal(min.y / 1000f), new BigDecimal(min.z / 1000f));
}
}
}
});
        // Align the manipulator Z axis with the normal of the vertex closest to the
        // manipulator (selected vertices only, when a selection exists), then derive
        // orthogonal X/Y axes from a tangent vector.
        btn_Manipulator_7_toVertexNormal[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        float minDist = Float.MAX_VALUE;
                        Vector4f next = new Vector4f(c3d.getManipulator().getPosition());
                        // NOTE(review): 'min' stays null when the file has no vertices;
                        // getVertexNormal(null) below would then likely throw — confirm.
                        Vertex min = null;
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        Set<Vertex> vertices;
                        if (vm.getSelectedVertices().isEmpty()) {
                            vertices = vm.getVertices();
                        } else {
                            vertices = vm.getSelectedVertices();
                        }
                        // Nearest-vertex search by squared distance.
                        for (Vertex vertex : vertices) {
                            Vector4f sub = Vector4f.sub(next, vertex.toVector4f(), null);
                            float d2 = sub.lengthSquared();
                            if (d2 < minDist) {
                                minDist = d2;
                                min = vertex;
                            }
                        }
                        vm = c3d.getLockableDatFileReference().getVertexManager();
                        Vector4f n = vm.getVertexNormal(min);
                        // Pick a tangent t = (tx,ty,tz) that is not parallel to n.
                        // NOTE(review): the first and third conditions test the SAME cross
                        // product (n x (0,0,tx)), so the third branch is unreachable —
                        // presumably (0f, tx, 0f) was intended there; confirm before
                        // relying on the final 'return' fallback.
                        float tx = 1f;
                        float ty = 0f;
                        float tz = 0f;
                        if (n.x <= 0f) {
                            tx = -1;
                        }
                        if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                            tz = tx;
                            tx = 0f;
                            ty = 0f;
                        } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, 0f, 0f), null).length()) > .00001f) {
                            // ty = 0f;
                            // tz = 0f;
                        } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                            ty = tx;
                            tx = 0f;
                            tz = 0f;
                        } else {
                            return;
                        }
                        // Build a right-handed frame: Z = n, X = n x t, Y = X x Z.
                        Vector3f cross = (Vector3f) Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, ty, tz), null).normalise();
                        c3d.getManipulator().getZaxis().set(n.x, n.y, n.z, 1f);
                        c3d.getManipulator().getXaxis().set(cross.x, cross.y, cross.z, 1f);
                        Vector4f zaxis = c3d.getManipulator().getZaxis();
                        Vector4f xaxis = c3d.getManipulator().getXaxis();
                        cross = Vector3f.cross(new Vector3f(xaxis.x, xaxis.y, xaxis.z), new Vector3f(zaxis.x, zaxis.y, zaxis.z), null);
                        c3d.getManipulator().getYaxis().set(cross.x, cross.y, cross.z, 1f);
                        c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
                                new BigDecimal(c3d.getManipulator().getXaxis().z));
                        c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
                                new BigDecimal(c3d.getManipulator().getYaxis().z));
                        c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
                                new BigDecimal(c3d.getManipulator().getZaxis().z));
                    }
                }
            }
        });
        // Align the manipulator Z axis with the normal of the edge closest to the
        // manipulator, then derive orthogonal X/Y axes from a tangent vector.
        btn_Manipulator_8_toEdgeNormal[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        Vector4f n = vm.getMinimalDistanceEdgeNormal(new Vertex(c3d.getManipulator().getPosition()));
                        // Pick a tangent t = (tx,ty,tz) that is not parallel to n.
                        // NOTE(review): the first and third conditions test the SAME cross
                        // product (n x (0,0,tx)), so the third branch is unreachable —
                        // presumably (0f, tx, 0f) was intended there; confirm.
                        float tx = 1f;
                        float ty = 0f;
                        float tz = 0f;
                        if (n.x <= 0f) {
                            tx = -1;
                        }
                        if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                            tz = tx;
                            tx = 0f;
                            ty = 0f;
                        } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, 0f, 0f), null).length()) > .00001f) {
                            // ty = 0f;
                            // tz = 0f;
                        } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                            ty = tx;
                            tx = 0f;
                            tz = 0f;
                        } else {
                            return;
                        }
                        // Build a right-handed frame: Z = n, X = n x t, Y = X x Z.
                        Vector3f cross = (Vector3f) Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, ty, tz), null).normalise();
                        c3d.getManipulator().getZaxis().set(n.x, n.y, n.z, 1f);
                        c3d.getManipulator().getXaxis().set(cross.x, cross.y, cross.z, 1f);
                        Vector4f zaxis = c3d.getManipulator().getZaxis();
                        Vector4f xaxis = c3d.getManipulator().getXaxis();
                        cross = Vector3f.cross(new Vector3f(xaxis.x, xaxis.y, xaxis.z), new Vector3f(zaxis.x, zaxis.y, zaxis.z), null);
                        c3d.getManipulator().getYaxis().set(cross.x, cross.y, cross.z, 1f);
                        c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
                                new BigDecimal(c3d.getManipulator().getXaxis().z));
                        c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
                                new BigDecimal(c3d.getManipulator().getYaxis().z));
                        c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
                                new BigDecimal(c3d.getManipulator().getZaxis().z));
                    }
                }
            }
        });
        // Aligns the manipulator with the surface normal closest to its current position:
        // Z axis = surface normal, X axis = a tangent built from an axis-aligned helper
        // vector, Y axis = Z x X (orthogonal frame).
        btn_Manipulator_9_toSurfaceNormal[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        Vector4f n = vm.getMinimalDistanceSurfaceNormal(new Vertex(c3d.getManipulator().getPosition()));
                        // Helper vector sign follows the normal's X sign so the resulting
                        // tangent keeps a stable orientation.
                        float tx = 1f;
                        float ty = 0f;
                        float tz = 0f;
                        if (n.x <= 0f) {
                            tx = -1;
                        }
                        // Pick an axis-aligned helper vector that is NOT parallel to n
                        // (cross product length > epsilon), to build the tangent from.
                        if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                            tz = tx;
                            tx = 0f;
                            ty = 0f;
                        } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, 0f, 0f), null).length()) > .00001f) {
                            // ty = 0f;
                            // tz = 0f;
                        // NOTE(review): this condition is identical to the first branch above
                        // (helper (0,0,tx)), so this branch is unreachable; it was presumably
                        // meant to test the Y axis helper (0,tx,0) — TODO confirm.
                        } else if (Math.abs(Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(0f, 0f, tx), null).length()) > .00001f) {
                            ty = tx;
                            tx = 0f;
                            tz = 0f;
                        } else {
                            // Degenerate normal: no usable helper axis found, keep manipulator unchanged.
                            return;
                        }
                        Vector3f cross = (Vector3f) Vector3f.cross(new Vector3f(n.x, n.y, n.z), new Vector3f(tx, ty, tz), null).normalise();
                        c3d.getManipulator().getZaxis().set(n.x, n.y, n.z, 1f);
                        c3d.getManipulator().getXaxis().set(cross.x, cross.y, cross.z, 1f);
                        Vector4f zaxis = c3d.getManipulator().getZaxis();
                        Vector4f xaxis = c3d.getManipulator().getXaxis();
                        cross = Vector3f.cross(new Vector3f(xaxis.x, xaxis.y, xaxis.z), new Vector3f(zaxis.x, zaxis.y, zaxis.z), null);
                        c3d.getManipulator().getYaxis().set(cross.x, cross.y, cross.z, 1f);
                        // Mirror the float axes into the exact BigDecimal representation.
                        c3d.getManipulator().setAccurateXaxis(new BigDecimal(c3d.getManipulator().getXaxis().x), new BigDecimal(c3d.getManipulator().getXaxis().y),
                                new BigDecimal(c3d.getManipulator().getXaxis().z));
                        c3d.getManipulator().setAccurateYaxis(new BigDecimal(c3d.getManipulator().getYaxis().x), new BigDecimal(c3d.getManipulator().getYaxis().y),
                                new BigDecimal(c3d.getManipulator().getYaxis().z));
                        c3d.getManipulator().setAccurateZaxis(new BigDecimal(c3d.getManipulator().getZaxis().x), new BigDecimal(c3d.getManipulator().getZaxis().y),
                                new BigDecimal(c3d.getManipulator().getZaxis().z));
                    }
                }
            }
        });
btn_Manipulator_XIV_adjustRotationCenter[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
vm.adjustRotationCenter(c3d, null);
}
}
}
});
        // Select every element (visible or not) in the file being edited.
        mntm_SelectAll[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        loadSelectorSettings();
                        vm.selectAll(sels, true);
                        vm.syncWithTextEditors();
                        return;
                    }
                }
            }
        });
        // Select every visible element in the file being edited.
        mntm_SelectAllVisible[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        loadSelectorSettings();
                        vm.selectAll(sels, false);
                        vm.syncWithTextEditors();
                        return;
                    }
                }
            }
        });
        // Select all elements (visible or not) that share a colour with the current selection.
        mntm_SelectAllWithColours[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        loadSelectorSettings();
                        vm.selectAllWithSameColours(sels, true);
                        vm.syncWithTextEditors();
                        return;
                    }
                }
            }
        });
        // Select visible elements that share a colour with the current selection.
        mntm_SelectAllVisibleWithColours[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        loadSelectorSettings();
                        vm.selectAllWithSameColours(sels, false);
                        vm.syncWithTextEditors();
                        return;
                    }
                }
            }
        });
        // Clear the current selection.
        mntm_SelectNone[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.clearSelection();
                        vm.syncWithTextEditors();
                        return;
                    }
                }
            }
        });
        // Invert the current selection according to the selector settings.
        mntm_SelectInverse[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        loadSelectorSettings();
                        vm.selectInverse(sels);
                        vm.syncWithTextEditors();
                        return;
                    }
                }
            }
        });
        // Selector filter toggles. Each toggle re-enables "Select Everything" when at
        // least one filter flag is active, and re-opens the selection menu afterwards.
        // The work is posted via asyncExec so it runs after the menu item's own state
        // change has been processed by SWT.
        mntm_WithSameColour[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        mntm_SelectEverything[0].setEnabled(
                                mntm_WithHiddenData[0].getSelection() ||
                                mntm_WithSameColour[0].getSelection() ||
                                mntm_WithSameOrientation[0].getSelection() ||
                                mntm_ExceptSubfiles[0].getSelection()
                        );
                        showSelectMenu();
                    }
                });
            }
        });
        // "Same orientation" additionally asks for the angular threshold when enabled.
        mntm_WithSameOrientation[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        mntm_SelectEverything[0].setEnabled(
                                mntm_WithHiddenData[0].getSelection() ||
                                mntm_WithSameColour[0].getSelection() ||
                                mntm_WithSameOrientation[0].getSelection() ||
                                mntm_ExceptSubfiles[0].getSelection()
                        );
                        if (mntm_WithSameOrientation[0].getSelection()) {
                            new ValueDialog(getShell(), "Set angular surface normal difference:", "Threshold in degree [°], range from -90 to 180.\nNegative values do not care about the surface winding,\nwhile positive do.") { //$NON-NLS-1$ //$NON-NLS-2$ I18N
                                @Override
                                public void initializeSpinner() {
                                    this.spn_Value[0].setMinimum(new BigDecimal("-90")); //$NON-NLS-1$
                                    this.spn_Value[0].setMaximum(new BigDecimal("180")); //$NON-NLS-1$
                                    this.spn_Value[0].setValue(sels.getAngle());
                                }
                                @Override
                                public void applyValue() {
                                    sels.setAngle(this.spn_Value[0].getValue());
                                }
                            }.open();
                        }
                        showSelectMenu();
                    }
                });
            }
        });
        // "With accuracy" asks for the matching distance threshold (in LDU) when enabled.
        mntm_WithAccuracy[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        mntm_SelectEverything[0].setEnabled(
                                mntm_WithHiddenData[0].getSelection() ||
                                mntm_WithSameColour[0].getSelection() ||
                                mntm_WithSameOrientation[0].getSelection() ||
                                mntm_ExceptSubfiles[0].getSelection()
                        );
                        if (mntm_WithAccuracy[0].getSelection()) {
                            new ValueDialog(getShell(), "Set accuracy:", "Threshold in LDU, range from 0 to 1000.\nControls the maximum distance between two points that the process will consider matching") { //$NON-NLS-1$ //$NON-NLS-2$ I18N
                                @Override
                                public void initializeSpinner() {
                                    this.spn_Value[0].setMinimum(new BigDecimal("0")); //$NON-NLS-1$
                                    this.spn_Value[0].setMaximum(new BigDecimal("1000")); //$NON-NLS-1$
                                    this.spn_Value[0].setValue(sels.getEqualDistance());
                                }
                                @Override
                                public void applyValue() {
                                    sels.setEqualDistance(this.spn_Value[0].getValue());
                                }
                            }.open();
                        }
                        showSelectMenu();
                    }
                });
            }
        });
        mntm_WithHiddenData[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        mntm_SelectEverything[0].setEnabled(
                                mntm_WithHiddenData[0].getSelection() ||
                                mntm_WithSameColour[0].getSelection() ||
                                mntm_WithSameOrientation[0].getSelection() ||
                                mntm_ExceptSubfiles[0].getSelection()
                        );
                        showSelectMenu();
                    }
                });
            }
        });
        // The following toggles only need to re-open the selection menu after their
        // checked state changes (posted via asyncExec, after SWT finished the click).
        mntm_WithWholeSubfiles[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        showSelectMenu();
                    }
                });
            }
        });
        // "Except subfiles" also updates the dependent controls: it toggles the
        // "Select Everything" availability and disables "whole subfiles" selection.
        mntm_ExceptSubfiles[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        mntm_SelectEverything[0].setEnabled(
                                mntm_WithHiddenData[0].getSelection() ||
                                mntm_WithSameColour[0].getSelection() ||
                                mntm_WithSameOrientation[0].getSelection() ||
                                mntm_ExceptSubfiles[0].getSelection()
                        );
                        mntm_WithWholeSubfiles[0].setEnabled(!mntm_ExceptSubfiles[0].getSelection());
                        showSelectMenu();
                    }
                });
            }
        });
        mntm_StopAtEdges[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        showSelectMenu();
                    }
                });
            }
        });
        mntm_STriangles[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        showSelectMenu();
                    }
                });
            }
        });
        mntm_SQuads[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        showSelectMenu();
                    }
                });
            }
        });
        mntm_SCLines[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        showSelectMenu();
                    }
                });
            }
        });
        mntm_SVertices[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        showSelectMenu();
                    }
                });
            }
        });
        mntm_SLines[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        showSelectMenu();
                    }
                });
            }
        });
        // Run the configurable selector with scope EVERYTHING.
        mntm_SelectEverything[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        sels.setScope(SelectorSettings.EVERYTHING);
                        loadSelectorSettings();
                        vm.selector(sels);
                        vm.syncWithTextEditors();
                    }
                }
            }
        });
        // Run the configurable selector with scope CONNECTED (topologically connected elements).
        mntm_SelectConnected[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        sels.setScope(SelectorSettings.CONNECTED);
                        loadSelectorSettings();
                        vm.selector(sels);
                        vm.syncWithTextEditors();
                    }
                }
            }
        });
        // Run the configurable selector with scope TOUCHING (elements sharing vertices).
        mntm_SelectTouching[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        sels.setScope(SelectorSettings.TOUCHING);
                        loadSelectorSettings();
                        vm.selector(sels);
                        vm.syncWithTextEditors();
                    }
                }
            }
        });
        // Select vertices which are not referenced by any line/triangle/quad.
        mntm_SelectIsolatedVertices[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        for (OpenGLRenderer renderer : renders) {
                            Composite3D c3d = renderer.getC3D();
                            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                                vm.selectIsolatedVertices();
                                vm.syncWithTextEditors();
                            }
                        }
                    }
                });
            }
        });
        // Split each selected edge once (into 2 fractions).
        mntm_Split[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        for (OpenGLRenderer renderer : renders) {
                            Composite3D c3d = renderer.getC3D();
                            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                                vm.split(2);
                            }
                        }
                    }
                });
            }
        });
        // Ask for a fraction count (2..1000) and split each selected edge that many times.
        mntm_SplitNTimes[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Display.getCurrent().asyncExec(new Runnable() {
                    @Override
                    public void run() {
                        for (OpenGLRenderer renderer : renders) {
                            Composite3D c3d = renderer.getC3D();
                            if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                                // One-element array so the anonymous dialog can write the result.
                                final int[] frac = new int[]{2};
                                if (new ValueDialogInt(getShell(), "Split edges:", "(Number of resulting fractions)") { //$NON-NLS-1$ //$NON-NLS-2$ I18N
                                    @Override
                                    public void initializeSpinner() {
                                        this.spn_Value[0].setMinimum(2);
                                        this.spn_Value[0].setMaximum(1000);
                                        this.spn_Value[0].setValue(2);
                                    }
                                    @Override
                                    public void applyValue() {
                                        frac[0] = this.spn_Value[0].getValue();
                                    }
                                }.open() == OK) {
                                    VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                                    vm.split(frac[0]);
                                }
                            }
                        }
                    }
                });
            }
        });
        // Merge the selection to its average position.
        mntm_MergeToAverage[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.merge(MergeTo.AVERAGE, true);
                        return;
                    }
                }
            }
        });
        // Merge the selection onto the last selected vertex.
        mntm_MergeToLastSelected[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.merge(MergeTo.LAST_SELECTED, true);
                        return;
                    }
                }
            }
        });
        // Snap/merge the selection to the nearest vertex.
        mntm_MergeToNearestVertex[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.merge(MergeTo.NEAREST_VERTEX, true);
                        return;
                    }
                }
            }
        });
        // Snap/merge the selection to the nearest edge.
        mntm_MergeToNearestEdge[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.merge(MergeTo.NEAREST_EDGE, true);
                        return;
                    }
                }
            }
        });
        // Snap/merge the selection to the nearest face.
        mntm_MergeToNearestFace[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.merge(MergeTo.NEAREST_FACE, true);
                        return;
                    }
                }
            }
        });
        // Set absolute X/Y/Z coordinates for the selection; if exactly one vertex is
        // selected its position pre-fills the dialog.
        mntm_setXYZ[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        Vertex v = null;
                        final VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        final Set<Vertex> sv = vm.getSelectedVertices();
                        if (sv.size() == 1) {
                            v = sv.iterator().next();
                        }
                        if (new CoordinatesDialog(getShell(), v).open() == IDialogConstants.OK_ID) {
                            vm.setXyzOrTranslateOrTransform(CoordinatesDialog.getVertex(), null, TransformationMode.SET, CoordinatesDialog.isX(), CoordinatesDialog.isY(), CoordinatesDialog.isZ(), true);
                        }
                        return;
                    }
                }
            }
        });
        // Translate the selection by an offset entered in a dialog.
        mntm_Translate[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        if (new TranslateDialog(getShell(), null).open() == IDialogConstants.OK_ID) {
                            c3d.getLockableDatFileReference().getVertexManager().setXyzOrTranslateOrTransform(TranslateDialog.getOffset(), null, TransformationMode.TRANSLATE, TranslateDialog.isX(), TranslateDialog.isY(), TranslateDialog.isZ(), true);
                        }
                        return;
                    }
                }
            }
        });
mntm_Rotate[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
TreeSet<Vertex> clipboard = new TreeSet<Vertex>();
if (VertexManager.getClipboard().size() == 1) {
GData vertex = VertexManager.getClipboard().get(0);
if (vertex.type() == 0) {
String line = vertex.toString();
line = line.replaceAll("\\s+", " ").trim(); //$NON-NLS-1$ //$NON-NLS-2$
String[] data_segments = line.split("\\s+"); //$NON-NLS-1$
if (line.startsWith("0 !LPE")) { //$NON-NLS-1$
if (line.startsWith("VERTEX ", 7)) { //$NON-NLS-1$
Vector3d start = new Vector3d();
boolean numberError = false;
if (data_segments.length == 6) {
try {
start.setX(new BigDecimal(data_segments[3], Threshold.mc));
} catch (NumberFormatException nfe) {
numberError = true;
}
try {
start.setY(new BigDecimal(data_segments[4], Threshold.mc));
} catch (NumberFormatException nfe) {
numberError = true;
}
try {
start.setZ(new BigDecimal(data_segments[5], Threshold.mc));
} catch (NumberFormatException nfe) {
numberError = true;
}
} else {
numberError = true;
}
if (!numberError) {
clipboard.add(new Vertex(start));
}
}
}
}
}
if (new RotateDialog(getShell(), null, clipboard).open() == IDialogConstants.OK_ID) {
c3d.getLockableDatFileReference().getVertexManager().setXyzOrTranslateOrTransform(RotateDialog.getAngles(), RotateDialog.getPivot(), TransformationMode.ROTATE, RotateDialog.isX(), RotateDialog.isY(), TranslateDialog.isZ(), true);
}
return;
}
}
}
});
        // Scale the selection. If the clipboard holds exactly one "0 !LPE VERTEX x y z"
        // meta line, that vertex is parsed and offered as a scaling pivot candidate.
        mntm_Scale[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        TreeSet<Vertex> clipboard = new TreeSet<Vertex>();
                        if (VertexManager.getClipboard().size() == 1) {
                            GData vertex = VertexManager.getClipboard().get(0);
                            if (vertex.type() == 0) {
                                // Normalise whitespace before parsing the meta line.
                                String line = vertex.toString();
                                line = line.replaceAll("\\s+", " ").trim(); //$NON-NLS-1$ //$NON-NLS-2$
                                String[] data_segments = line.split("\\s+"); //$NON-NLS-1$
                                if (line.startsWith("0 !LPE")) { //$NON-NLS-1$
                                    if (line.startsWith("VERTEX ", 7)) { //$NON-NLS-1$
                                        Vector3d start = new Vector3d();
                                        boolean numberError = false;
                                        if (data_segments.length == 6) {
                                            try {
                                                start.setX(new BigDecimal(data_segments[3], Threshold.mc));
                                            } catch (NumberFormatException nfe) {
                                                numberError = true;
                                            }
                                            try {
                                                start.setY(new BigDecimal(data_segments[4], Threshold.mc));
                                            } catch (NumberFormatException nfe) {
                                                numberError = true;
                                            }
                                            try {
                                                start.setZ(new BigDecimal(data_segments[5], Threshold.mc));
                                            } catch (NumberFormatException nfe) {
                                                numberError = true;
                                            }
                                        } else {
                                            numberError = true;
                                        }
                                        if (!numberError) {
                                            clipboard.add(new Vertex(start));
                                        }
                                    }
                                }
                            }
                        }
                        if (new ScaleDialog(getShell(), null, clipboard).open() == IDialogConstants.OK_ID) {
                            c3d.getLockableDatFileReference().getVertexManager().setXyzOrTranslateOrTransform(ScaleDialog.getScaleFactors(), ScaleDialog.getPivot(), TransformationMode.SCALE, ScaleDialog.isX(), ScaleDialog.isY(), ScaleDialog.isZ(), true);
                        }
                        return;
                    }
                }
            }
        });
        // The following menu items open a settings dialog for a tool and, on OK,
        // run that tool on the file currently being edited.
        mntm_Edger2[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new EdgerDialog(getShell(), es).open() == IDialogConstants.OK_ID)
                            vm.addEdges(es);
                        return;
                    }
                }
            }
        });
        mntm_Rectifier[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new RectifierDialog(getShell(), rs).open() == IDialogConstants.OK_ID)
                            vm.rectify(rs, true);
                        return;
                    }
                }
            }
        });
        mntm_Isecalc[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new IsecalcDialog(getShell(), is).open() == IDialogConstants.OK_ID)
                            vm.isecalc(is);
                        return;
                    }
                }
            }
        });
        mntm_SlicerPro[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new SlicerProDialog(getShell(), ss).open() == IDialogConstants.OK_ID)
                            vm.slicerpro(ss);
                        return;
                    }
                }
            }
        });
        mntm_Intersector[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new IntersectorDialog(getShell(), ins).open() == IDialogConstants.OK_ID)
                            vm.intersector(ins, true);
                        return;
                    }
                }
            }
        });
        mntm_Lines2Pattern[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new Lines2PatternDialog(getShell()).open() == IDialogConstants.OK_ID)
                            vm.lines2pattern();
                        return;
                    }
                }
            }
        });
        // PathTruder / SymSplitter / Unificator: same dialog-then-run pattern as above.
        mntm_PathTruder[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new PathTruderDialog(getShell(), ps).open() == IDialogConstants.OK_ID)
                            vm.pathTruder(ps);
                        return;
                    }
                }
            }
        });
        mntm_SymSplitter[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new SymSplitterDialog(getShell(), sims).open() == IDialogConstants.OK_ID)
                            vm.symSplitter(sims);
                        return;
                    }
                }
            }
        });
        mntm_Unificator[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        if (new UnificatorDialog(getShell(), us).open() == IDialogConstants.OK_ID)
                            vm.unificator(us);
                        return;
                    }
                }
            }
        });
        // Txt2Dat: triangulate a text string with a chosen font and append the
        // resulting triangles to the end of the current file's draw chain.
        mntm_Txt2Dat[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        DatFile df = c3d.getLockableDatFileReference();
                        if (df.isReadOnly()) return;
                        VertexManager vm = df.getVertexManager();
                        if (new Txt2DatDialog(getShell(), ts).open() == IDialogConstants.OK_ID && !ts.getText().trim().isEmpty()) {
                            // Map the SWT font selection to an equivalent AWT font
                            // (falls back to a 32pt monospace font when none was chosen).
                            java.awt.Font myFont;
                            if (ts.getFontData() == null) {
                                myFont = new java.awt.Font(org.nschmidt.ldparteditor.enums.Font.MONOSPACE.getFontData()[0].getName(), java.awt.Font.PLAIN, 32);
                            } else {
                                FontData fd = ts.getFontData();
                                int style = 0;
                                final int c2 = SWT.BOLD | SWT.ITALIC;
                                switch (fd.getStyle()) {
                                case c2:
                                    style = java.awt.Font.BOLD | java.awt.Font.ITALIC;
                                    break;
                                case SWT.BOLD:
                                    style = java.awt.Font.BOLD;
                                    break;
                                case SWT.ITALIC:
                                    style = java.awt.Font.ITALIC;
                                    break;
                                case SWT.NORMAL:
                                    style = java.awt.Font.PLAIN;
                                    break;
                                }
                                myFont = new java.awt.Font(fd.getName(), style, fd.getHeight());
                            }
                            // Splice the generated triangles after the current draw chain tail,
                            // keeping the line-number map in sync.
                            GData anchorData = df.getDrawChainTail();
                            int lineNumber = df.getDrawPerLine_NOCLONE().getKey(anchorData);
                            Set<GData> triangleSet = TextTriangulator.triangulateText(myFont, ts.getText().trim(), ts.getFlatness().doubleValue(), ts.getInterpolateFlatness().doubleValue(), View.DUMMY_REFERENCE, df, ts.getFontHeight().intValue(), ts.getDeltaAngle().doubleValue());
                            for (GData gda3 : triangleSet) {
                                lineNumber++;
                                df.getDrawPerLine_NOCLONE().put(lineNumber, gda3);
                                GData gdata = gda3;
                                anchorData.setNext(gda3);
                                anchorData = gdata;
                            }
                            anchorData.setNext(null);
                            df.setDrawChainTail(anchorData);
                            vm.setModified(true);
                            return;
                        }
                    }
                }
            }
        });
// MARK Options
mntm_ResetSettingsOnRestart[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_WARNING | SWT.OK | SWT.CANCEL);
messageBox.setText("Warning:"); //$NON-NLS-1$ I18N
messageBox.setMessage("Are you sure to delete your configuration on the next start?"); //$NON-NLS-1$
int result = messageBox.open();
if (result == SWT.CANCEL) {
return;
}
WorkbenchManager.getUserSettingState().setResetOnStart(true);
}
});
mntm_SelectAnotherLDConfig[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
FileDialog fd = new FileDialog(sh, SWT.OPEN);
fd.setText("Open LDraw Configuration File (LDConfig.ldr):"); //$NON-NLS-1$ I18N Needs translation!
fd.setFilterPath(WorkbenchManager.getUserSettingState().getLdrawFolderPath());
String[] filterExt = { "*.ldr", "LDConfig.ldr", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
fd.setFilterExtensions(filterExt);
String[] filterNames = { "LDraw Configuration File (*.ldr)", "LDraw Configuration File (LDConfig.ldr)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ I18N Needs translation!
fd.setFilterNames(filterNames);
String selected = fd.open();
System.out.println(selected);
if (selected != null && View.loadLDConfig(selected)) {
GData.CACHE_warningsAndErrors.clear();
WorkbenchManager.getUserSettingState().setLdConfigPath(selected);
Set<DatFile> dfs = new HashSet<DatFile>();
for (OpenGLRenderer renderer : renders) {
dfs.add(renderer.getC3D().getLockableDatFileReference());
}
for (DatFile df : dfs) {
SubfileCompiler.compile(df);
}
}
}
});
        // Persist the "sync with text editor" toggle into the user settings.
        mntm_SyncWithTextEditor[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                WorkbenchManager.getUserSettingState().getSyncWithTextEditor().set(mntm_SyncWithTextEditor[0].getSelection());
            }
        });
        // Persist the "sync !LPE inline" toggle into the user settings.
        mntm_SyncLpeInline[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                WorkbenchManager.getUserSettingState().getSyncWithLpeInline().set(mntm_SyncLpeInline[0].getSelection());
            }
        });
        // MARK Merge, split...
        // Flip the winding/orientation of the selected faces.
        mntm_Flip[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.flipSelection();
                        return;
                    }
                }
            }
        });
        // Subdivide the selection with the Catmull-Clark scheme.
        mntm_SubdivideCatmullClark[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.subdivideCatmullClark();
                        return;
                    }
                }
            }
        });
        // Subdivide the selection with the Loop scheme.
        mntm_SubdivideLoop[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        vm.subdivideLoop();
                        return;
                    }
                }
            }
        });
        // MARK Background PNG
        // Moves the camera of the active 3D view so it looks straight at the selected
        // background PNG picture (creating/selecting a picture first if necessary).
        btn_PngFocus[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                Composite3D c3d = null;
                for (OpenGLRenderer renderer : renders) {
                    c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
                        // Prefer the last selected composite of that file, if any.
                        c3d = c3d.getLockableDatFileReference().getLastSelectedComposite();
                        if (c3d == null) {
                            c3d = renderer.getC3D();
                        }
                        break;
                    }
                }
                // NOTE(review): if `renders` is empty, c3d stays null and the next line
                // throws a NullPointerException — presumably at least one renderer always
                // exists here; TODO confirm.
                VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                GDataPNG png = vm.getSelectedBgPicture();
                if (png == null) {
                    // No picture selected: create a default one or select the first existing.
                    if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
                        vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
                    } else {
                        vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
                    }
                    png = vm.getSelectedBgPicture();
                    updateBgPictureTab();
                }
                // Build the view transform from the picture's scale, angles and offset.
                Matrix4f tMatrix = new Matrix4f();
                tMatrix.setIdentity();
                tMatrix = tMatrix.scale(new Vector3f(png.scale.x, png.scale.y, png.scale.z));
                Matrix4f dMatrix = new Matrix4f();
                dMatrix.setIdentity();
                Matrix4f.rotate((float) (png.angleB.doubleValue() / 180.0 * Math.PI), new Vector3f(1f, 0f, 0f), dMatrix, dMatrix);
                Matrix4f.rotate((float) (png.angleA.doubleValue() / 180.0 * Math.PI), new Vector3f(0f, 1f, 0f), dMatrix, dMatrix);
                Matrix4f.mul(dMatrix, tMatrix, tMatrix);
                // Rotate each offset component separately, then accumulate the negated
                // results into the translation column of transMatrix.
                Vector4f vx = Matrix4f.transform(dMatrix, new Vector4f(png.offset.x, 0f, 0f, 1f), null);
                Vector4f vy = Matrix4f.transform(dMatrix, new Vector4f(0f, png.offset.y, 0f, 1f), null);
                Vector4f vz = Matrix4f.transform(dMatrix, new Vector4f(0f, 0f, png.offset.z, 1f), null);
                Matrix4f transMatrix = new Matrix4f();
                transMatrix.setIdentity();
                transMatrix.m30 = -vx.x;
                transMatrix.m31 = -vx.y;
                transMatrix.m32 = -vx.z;
                transMatrix.m30 -= vy.x;
                transMatrix.m31 -= vy.y;
                transMatrix.m32 -= vy.z;
                transMatrix.m30 -= vz.x;
                transMatrix.m31 -= vz.y;
                transMatrix.m32 -= vz.z;
                Matrix4f rotMatrixD = new Matrix4f();
                rotMatrixD.setIdentity();
                Matrix4f.rotate((float) (png.angleB.doubleValue() / 180.0 * Math.PI), new Vector3f(1f, 0f, 0f), rotMatrixD, rotMatrixD);
                Matrix4f.rotate((float) (png.angleA.doubleValue() / 180.0 * Math.PI), new Vector3f(0f, 1f, 0f), rotMatrixD, rotMatrixD);
                rotMatrixD = rotMatrixD.scale(new Vector3f(-1f, 1f, -1f));
                rotMatrixD.invert();
                c3d.getRotation().load(rotMatrixD);
                c3d.getTranslation().load(transMatrix);
                c3d.getPerspectiveCalculator().calculateOriginData();
                vm.setSelectedBgPicture(png);
                return;
            }
        });
        // Replaces the image file of the selected background PNG picture via a file
        // dialog (creating/selecting a picture first if necessary).
        btn_PngImage[0].addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
                        VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
                        GDataPNG png = vm.getSelectedBgPicture();
                        // Ignore events triggered by programmatic tab updates.
                        if (updatingPngPictureTab) return;
                        if (png == null) {
                            if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
                                vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
                            } else {
                                vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
                            }
                            png = vm.getSelectedBgPicture();
                            updateBgPictureTab();
                        }
                        FileDialog fd = new FileDialog(getShell(), SWT.SAVE);
                        fd.setText("Open PNG Image"); //$NON-NLS-1$ I18N Needs translation!
                        try {
                            File f = new File(png.texturePath);
                            fd.setFilterPath(f.getParent());
                            fd.setFileName(f.getName());
                        } catch (Exception ex) {
                            // Best effort only: an unusable texture path simply leaves the
                            // dialog at its default location.
                        }
                        String[] filterExt = { "*.png", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$
                        fd.setFilterExtensions(filterExt);
                        String[] filterNames = { "Portable Network Graphics (*.png)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ I18N Needs translation!
                        fd.setFilterNames(filterNames);
                        String texturePath = fd.open();
                        if (texturePath != null) {
                            // Replace the picture by a copy that points at the new file.
                            String newText = png.getString(png.offset, png.angleA, png.angleB, png.angleC, png.scale, texturePath);
                            GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, png.angleC, png.scale, texturePath);
                            replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
                            // Every few replacements, drop stale GL textures to free memory.
                            pngPictureUpdateCounter++;
                            if (pngPictureUpdateCounter > 3) {
                                for (OpenGLRenderer renderer2 : renders) {
                                    renderer2.disposeOldTextures();
                                }
                                pngPictureUpdateCounter = 0;
                            }
                            vm.setModified(true);
                        }
                        return;
                    }
                }
            }
        });
btn_PngNext[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
DatFile df = c3d.getLockableDatFileReference();
if (df.equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = df.getVertexManager();
GDataPNG sp = vm.getSelectedBgPicture();
boolean noBgPictures = df.hasNoBackgroundPictures();
vm.setSelectedBgPictureIndex(vm.getSelectedBgPictureIndex() + 1);
boolean indexOutOfBounds = vm.getSelectedBgPictureIndex() >= df.getBackgroundPictureCount();
boolean noRealData = df.getDrawPerLine_NOCLONE().getKey(sp) == null;
if (noBgPictures) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
if (indexOutOfBounds) vm.setSelectedBgPictureIndex(0);
if (noRealData) {
vm.setSelectedBgPictureIndex(0);
vm.setSelectedBgPicture(df.getBackgroundPicture(0));
} else {
vm.setSelectedBgPicture(df.getBackgroundPicture(vm.getSelectedBgPictureIndex()));
}
}
updateBgPictureTab();
}
}
}
});
btn_PngPrevious[0].addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
DatFile df = c3d.getLockableDatFileReference();
if (df.equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = df.getVertexManager();
GDataPNG sp = vm.getSelectedBgPicture();
boolean noBgPictures = df.hasNoBackgroundPictures();
vm.setSelectedBgPictureIndex(vm.getSelectedBgPictureIndex() - 1);
boolean indexOutOfBounds = vm.getSelectedBgPictureIndex() < 0;
boolean noRealData = df.getDrawPerLine_NOCLONE().getKey(sp) == null;
if (noBgPictures) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
if (indexOutOfBounds) vm.setSelectedBgPictureIndex(df.getBackgroundPictureCount() - 1);
if (noRealData) {
vm.setSelectedBgPictureIndex(0);
vm.setSelectedBgPicture(df.getBackgroundPicture(0));
} else {
vm.setSelectedBgPicture(df.getBackgroundPicture(vm.getSelectedBgPictureIndex()));
}
}
updateBgPictureTab();
}
}
}
});
spn_PngA1[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
String newText = png.getString(png.offset, spn.getValue(), png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, spn.getValue(), png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngA2[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
String newText = png.getString(png.offset, png.angleA, spn.getValue(), png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, spn.getValue(), png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngA3[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
String newText = png.getString(png.offset, png.angleA, png.angleB, spn.getValue(), png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, spn.getValue(), png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngSX[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newScale = new Vertex(spn.getValue(), png.scale.Y, png.scale.Z);
String newText = png.getString(png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngSY[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newScale = new Vertex(png.scale.X, spn.getValue(), png.scale.Z);
String newText = png.getString(png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, png.offset, png.angleA, png.angleB, png.angleC, newScale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
vm.setModified(true);
return;
}
}
}
});
spn_PngX[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newOffset = new Vertex(spn.getValue(), png.offset.Y, png.offset.Z);
String newText = png.getString(newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngY[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newOffset = new Vertex(png.offset.X, spn.getValue(), png.offset.Z);
String newText = png.getString(newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
spn_PngZ[0].addValueChangeListener(new ValueChangeAdapter() {
@Override
public void valueChanged(BigDecimalSpinner spn) {
for (OpenGLRenderer renderer : renders) {
Composite3D c3d = renderer.getC3D();
if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit()) && !Project.getFileToEdit().isReadOnly()) {
VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
GDataPNG png = vm.getSelectedBgPicture();
if (updatingPngPictureTab) return;
if (png == null) {
if (c3d.getLockableDatFileReference().hasNoBackgroundPictures()) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeAllTextures();
}
vm.addBackgroundPicture("", new Vertex(BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO), BigDecimal.ZERO, BigDecimal.ZERO, BigDecimal.ZERO, new Vertex(BigDecimal.ONE, BigDecimal.ONE, BigDecimal.ONE), Project.getProjectPath() + File.separator + ".png"); //$NON-NLS-1$ //$NON-NLS-2$
} else {
vm.setSelectedBgPicture(c3d.getLockableDatFileReference().getBackgroundPicture(0));
}
png = vm.getSelectedBgPicture();
updateBgPictureTab();
}
Vertex newOffset = new Vertex(png.offset.X, png.offset.Y, spn.getValue());
String newText = png.getString(newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
GDataPNG newPngPicture = new GDataPNG(newText, newOffset, png.angleA, png.angleB, png.angleC, png.scale, png.texturePath);
replaceBgPicture(png, newPngPicture, c3d.getLockableDatFileReference());
pngPictureUpdateCounter++;
if (pngPictureUpdateCounter > 3) {
for (OpenGLRenderer renderer2 : renders) {
renderer2.disposeOldTextures();
}
pngPictureUpdateCounter = 0;
}
vm.setModified(true);
return;
}
}
}
});
Project.createDefault();
treeItem_Project[0].setData(Project.getProjectPath());
treeItem_Official[0].setData(WorkbenchManager.getUserSettingState().getLdrawFolderPath());
treeItem_Unofficial[0].setData(WorkbenchManager.getUserSettingState().getUnofficialFolderPath());
LibraryManager.readUnofficialParts(treeItem_UnofficialParts[0]);
LibraryManager.readUnofficialSubparts(treeItem_UnofficialSubparts[0]);
LibraryManager.readUnofficialPrimitives(treeItem_UnofficialPrimitives[0]);
LibraryManager.readUnofficialHiResPrimitives(treeItem_UnofficialPrimitives48[0]);
LibraryManager.readOfficialParts(treeItem_OfficialParts[0]);
LibraryManager.readOfficialSubparts(treeItem_OfficialSubparts[0]);
LibraryManager.readOfficialPrimitives(treeItem_OfficialPrimitives[0]);
LibraryManager.readOfficialHiResPrimitives(treeItem_OfficialPrimitives48[0]);
txt_Search[0].setText(" "); //$NON-NLS-1$
txt_Search[0].setText(""); //$NON-NLS-1$
Project.getFileToEdit().setLastSelectedComposite(Editor3DWindow.renders.get(0).getC3D());
new EditorTextWindow().run(Project.getFileToEdit());
updateBgPictureTab();
this.open();
// Dispose all resources (never delete this!)
ResourceManager.dispose();
SWTResourceManager.dispose();
// Dispose the display (never delete this, too!)
Display.getCurrent().dispose();
}
/**
 * Replaces a background picture ({@code GDataPNG}) with a new instance while
 * keeping its position in the draw chain of the linked DAT file.
 * <p>
 * Does nothing if {@code selectedBgPicture} is not part of the file's draw
 * chain. Afterwards the new picture becomes the selected background picture
 * and the background picture tab is refreshed.
 *
 * @param selectedBgPicture the picture to replace (must belong to {@code linkedDatFile})
 * @param newBgPicture      the replacement picture
 * @param linkedDatFile     the DAT file whose draw chain is updated
 */
private void replaceBgPicture(GDataPNG selectedBgPicture, GDataPNG newBgPicture, DatFile linkedDatFile) {
    // Look the line index up once; null means the picture is not in the draw chain.
    final Integer index = linkedDatFile.getDrawPerLine_NOCLONE().getKey(selectedBgPicture);
    if (index == null) return;
    GData before = selectedBgPicture.getBefore();
    GData next = selectedBgPicture.getNext();
    selectedBgPicture.setGoingToBeReplaced(true);
    linkedDatFile.getVertexManager().remove(selectedBgPicture);
    linkedDatFile.getDrawPerLine_NOCLONE().put(index, newBgPicture);
    // Re-link the neighbours so the chain stays intact.
    before.setNext(newBgPicture);
    newBgPicture.setNext(next);
    linkedDatFile.getVertexManager().setSelectedBgPicture(newBgPicture);
    updateBgPictureTab();
}
/**
 * Leaves every "add element" mode (subfiles, vertices, lines, triangles,
 * quads, condlines) and clears the temporary object vertices of every
 * currently rendered DAT file.
 */
private void resetAddState() {
    // Disable all add modes.
    setAddingSubfiles(false);
    setAddingVertices(false);
    setAddingLines(false);
    setAddingTriangles(false);
    setAddingQuads(false);
    setAddingCondlines(false);
    // Forget any partially entered geometry in each rendered file.
    for (OpenGLRenderer openGLRenderer : renders) {
        final DatFile datFile = openGLRenderer.getC3D().getLockableDatFileReference();
        datFile.setObjVertex1(null);
        datFile.setObjVertex2(null);
        datFile.setObjVertex3(null);
        datFile.setObjVertex4(null);
        datFile.setNearestObjVertex1(null);
        datFile.setNearestObjVertex2(null);
    }
}
/**
 * Creates the JFace actions for this window.
 * <p>
 * Intentionally empty: no actions are created programmatically at the
 * moment (the method previously contained only commented-out prototypes).
 * The hook is kept for the {@code ApplicationWindow} life-cycle.
 */
private void createActions() {
    // No JFace actions are created here at the moment.
}
/**
 * The Shell-Close-Event.
 * <p>
 * Before the window closes, asks the user to save (or discard) every
 * modified file, closes all open text editor windows, and — if the default
 * project contains at least one non-empty or untracked file — offers to save
 * it as a new project. Finally disposes all GL renderers, saves the
 * workbench state and closes the shell. Choosing "Cancel" in any dialog
 * aborts the close.
 */
@Override
protected void handleShellCloseEvent() {
    boolean unsavedProjectFiles = false;
    Set<DatFile> unsavedFiles = new HashSet<DatFile>(Project.getUnsavedFiles());
    for (DatFile df : unsavedFiles) {
        // Only ask for files with real modifications (or non-empty virtual files).
        if (!df.getText().equals(df.getOriginalText()) || df.isVirtual() && !df.getText().trim().isEmpty()) {
            MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.CANCEL | SWT.NO);
            messageBox.setText(I18n.DIALOG_UnsavedChangesTitle);
            Object[] messageArguments = {df.getShortName()};
            MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
            formatter.setLocale(View.LOCALE);
            formatter.applyPattern(I18n.DIALOG_UnsavedChanges);
            messageBox.setMessage(formatter.format(messageArguments));
            int result = messageBox.open();
            if (result == SWT.NO) {
                // Discard changes: remove file from tree
                updateTree_removeEntry(df);
            } else if (result == SWT.YES) {
                if (!df.save()) {
                    MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
                    messageBoxError.setText(I18n.DIALOG_Error);
                    messageBoxError.setMessage(I18n.DIALOG_CantSaveFile);
                    messageBoxError.open();
                    cleanupClosedData();
                    updateTree_unsavedEntries();
                    return; // abort closing
                }
            } else {
                // Cancel: abort closing
                cleanupClosedData();
                updateTree_unsavedEntries();
                return;
            }
        }
    }
    Set<EditorTextWindow> ow = new HashSet<EditorTextWindow>(Project.getOpenTextWindows());
    for (EditorTextWindow w : ow) {
        w.getShell().close();
    }
    unsavedProjectFiles = hasNonEmptyOrUntrackedEntries(getProjectParts().getItems())
            || hasNonEmptyOrUntrackedEntries(getProjectSubparts().getItems())
            || hasNonEmptyOrUntrackedEntries(getProjectPrimitives().getItems())
            || hasNonEmptyOrUntrackedEntries(getProjectPrimitives48().getItems());
    if (unsavedProjectFiles && Project.isDefaultProject()) {
        // Save new project here, if the project contains at least one non-empty file
        boolean cancelIt = false;
        boolean secondRun = false;
        while (true) {
            int result = IDialogConstants.CANCEL_ID;
            if (secondRun) result = new NewProjectDialog(true).open();
            if (result == IDialogConstants.OK_ID) {
                // Ask before overwriting an existing project directory.
                while (new File(Project.getTempProjectPath()).isDirectory()) {
                    MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.YES | SWT.CANCEL | SWT.NO);
                    messageBoxError.setText(I18n.PROJECT_ProjectOverwriteTitle);
                    messageBoxError.setMessage(I18n.PROJECT_ProjectOverwrite);
                    int result2 = messageBoxError.open();
                    if (result2 == SWT.NO) {
                        result = new NewProjectDialog(true).open();
                    } else if (result2 == SWT.YES) {
                        break;
                    } else {
                        cancelIt = true;
                        break;
                    }
                }
                if (!cancelIt) {
                    Project.setProjectName(Project.getTempProjectName());
                    Project.setProjectPath(Project.getTempProjectPath());
                    NLogger.debug(getClass(), "Saving new project..."); //$NON-NLS-1$
                    if (!Project.save()) {
                        MessageBox messageBoxError = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.OK);
                        messageBoxError.setText(I18n.DIALOG_Error);
                        messageBoxError.setMessage(I18n.DIALOG_CantSaveProject);
                        // FIX: the error dialog was built but never shown before.
                        messageBoxError.open();
                    }
                }
                break;
            } else {
                secondRun = true;
                MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_QUESTION | SWT.YES | SWT.CANCEL | SWT.NO);
                messageBox.setText(I18n.DIALOG_UnsavedChangesTitle);
                Object[] messageArguments = {I18n.DIALOG_TheNewProject};
                MessageFormat formatter = new MessageFormat(""); //$NON-NLS-1$
                formatter.setLocale(View.LOCALE);
                formatter.applyPattern(I18n.DIALOG_UnsavedChanges);
                messageBox.setMessage(formatter.format(messageArguments));
                int result2 = messageBox.open();
                if (result2 == SWT.CANCEL) {
                    cancelIt = true;
                    break;
                } else if (result2 == SWT.NO) {
                    break;
                }
            }
        }
        if (cancelIt) {
            cleanupClosedData();
            updateTree_unsavedEntries();
            return;
        }
    }
    // NEVER DELETE THIS!
    final int s = renders.size();
    for (int i = 0; i < s; i++) {
        GLCanvas canvas = canvasList.get(i);
        OpenGLRenderer renderer = renders.get(i);
        if (!canvas.isCurrent()) {
            canvas.setCurrent();
            try {
                GLContext.useContext(canvas);
            } catch (LWJGLException e) {
                NLogger.error(OpenGLRenderer.class, e);
            }
        }
        renderer.dispose();
    }
    // Save the workbench
    WorkbenchManager.saveWorkbench();
    setReturnCode(CANCEL);
    close();
}

/**
 * Returns {@code true} if at least one tree entry has non-empty text or is
 * not contained in the set of unsaved files. Replaces four copy-pasted
 * loops that OR-ed this condition into a flag.
 *
 * @param items the DatFile tree items of one project category
 * @return {@code true} if the category contributes to "unsaved project files"
 */
private static boolean hasNonEmptyOrUntrackedEntries(ArrayList<TreeItem> items) {
    for (TreeItem ti : items) {
        if (!((DatFile) ti.getData()).getText().trim().equals("") || !Project.getUnsavedFiles().contains(ti.getData())) { //$NON-NLS-1$
            return true;
        }
    }
    return false;
}
/**
 * Returns the serializable window state of this {@code Editor3DWindow}.
 *
 * @return the persisted window state
 */
public Editor3DWindowState getEditor3DWindowState() {
    return editor3DWindowState;
}
/**
 * Stores the serializable window state of this {@code Editor3DWindow}.
 *
 * @param editor3DWindowState
 *            the window state to keep
 */
public void setEditor3DWindowState(Editor3DWindowState editor3DWindowState) {
    this.editor3DWindowState = editor3DWindowState;
}
/**
 * Returns the singleton instance of this window.
 *
 * @return the current {@code Editor3DWindow} instance
 */
public static Editor3DWindow getWindow() {
    return window;
}
/**
 * Updates the tree for new unsaved entries.
 * <p>
 * Refreshes the display text of every DatFile item in the project and
 * unofficial-library categories ("* " prefix for unsaved files, "(!) "
 * prefix for project files stored outside the expected project sub-folder)
 * and rebuilds the "Unsaved" category from the current set of unsaved files.
 */
public void updateTree_unsavedEntries() {
    // The first four categories are the project ones; the last four belong
    // to the unofficial library (the order matters for the counter check below).
    ArrayList<TreeItem> categories = new ArrayList<TreeItem>();
    categories.add(this.treeItem_ProjectParts[0]);
    categories.add(this.treeItem_ProjectSubparts[0]);
    categories.add(this.treeItem_ProjectPrimitives[0]);
    categories.add(this.treeItem_ProjectPrimitives48[0]);
    categories.add(this.treeItem_UnofficialParts[0]);
    categories.add(this.treeItem_UnofficialSubparts[0]);
    categories.add(this.treeItem_UnofficialPrimitives[0]);
    categories.add(this.treeItem_UnofficialPrimitives48[0]);
    int counter = 0;
    for (TreeItem item : categories) {
        counter++;
        ArrayList<TreeItem> datFileTreeItems = item.getItems();
        for (TreeItem df : datFileTreeItems) {
            DatFile d = (DatFile) df.getData();
            // Display name starts with the plain file name of the (possibly renamed) file.
            StringBuilder nameSb = new StringBuilder(new File(d.getNewName()).getName());
            final String d2 = d.getDescription();
            // counter < 5 limits the "(!)" marker to the four project categories:
            // it flags files outside the project path, or (presumably) files placed
            // directly in the project root instead of a category sub-folder — TODO confirm.
            if (counter < 5 && (!d.getNewName().startsWith(Project.getProjectPath()) || !d.getNewName().replace(Project.getProjectPath() + File.separator, "").contains(File.separator))) { //$NON-NLS-1$
                nameSb.insert(0, "(!) "); //$NON-NLS-1$
            }
            // MARK For Debug Only!
            // DatType t = d.getType();
            // if (t == DatType.PART) {
            // nameSb.append(" PART"); //$NON-NLS-1$
            // } else if (t == DatType.SUBPART) {
            // nameSb.append(" SUBPART"); //$NON-NLS-1$
            // } else if (t == DatType.PRIMITIVE) {
            // nameSb.append(" PRIMITIVE"); //$NON-NLS-1$
            // } else if (t == DatType.PRIMITIVE48) {
            // nameSb.append(" PRIMITIVE48"); //$NON-NLS-1$
            // }
            if (d2 != null)
                nameSb.append(d2);
            // Unsaved files get a leading "* " marker.
            if (Project.getUnsavedFiles().contains(d)) {
                df.setText("* " + nameSb.toString()); //$NON-NLS-1$
            } else {
                df.setText(nameSb.toString());
            }
        }
    }
    // Rebuild the "Unsaved" category from scratch.
    this.treeItem_Unsaved[0].removeAll();
    Set<DatFile> unsaved = Project.getUnsavedFiles();
    for (DatFile df : unsaved) {
        TreeItem ti = new TreeItem(this.treeItem_Unsaved[0], SWT.NONE);
        StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
        final String d = df.getDescription();
        if (d != null)
            nameSb.append(d);
        ti.setText(nameSb.toString());
        ti.setData(df);
    }
    this.treeParts[0].build();
    this.treeParts[0].redraw();
}
/**
 * Updates the tree for renamed entries.
 * <p>
 * Moves DatFile items whose new file name now points into a different
 * category folder (project/unofficial × parts/subparts/primitives/hi-res
 * primitives) to the matching tree category and updates their
 * {@link DatType} accordingly. Both upper- and lower-case folder spellings
 * are recognized.
 */
@SuppressWarnings("unchecked")
public void updateTree_renamedEntries() {
    HashMap<String, TreeItem> categories = new HashMap<String, TreeItem>();
    HashMap<String, DatType> types = new HashMap<String, DatType>();
    ArrayList<String> validPrefixes = new ArrayList<String>();
    final String unofficial = WorkbenchManager.getUserSettingState().getUnofficialFolderPath();
    final String project = Project.getProjectPath();
    // Register every valid category folder prefix (upper- and lower-case variants).
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "PARTS" + File.separator + "S" + File.separator, this.treeItem_UnofficialSubparts[0], DatType.SUBPART); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "parts" + File.separator + "s" + File.separator, this.treeItem_UnofficialSubparts[0], DatType.SUBPART); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "PARTS" + File.separator, this.treeItem_UnofficialParts[0], DatType.PART); //$NON-NLS-1$
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "parts" + File.separator, this.treeItem_UnofficialParts[0], DatType.PART); //$NON-NLS-1$
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "P" + File.separator + "48" + File.separator, this.treeItem_UnofficialPrimitives48[0], DatType.PRIMITIVE48); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "p" + File.separator + "48" + File.separator, this.treeItem_UnofficialPrimitives48[0], DatType.PRIMITIVE48); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "P" + File.separator, this.treeItem_UnofficialPrimitives[0], DatType.PRIMITIVE); //$NON-NLS-1$
    registerPrefix(validPrefixes, categories, types, unofficial + File.separator + "p" + File.separator, this.treeItem_UnofficialPrimitives[0], DatType.PRIMITIVE); //$NON-NLS-1$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "PARTS" + File.separator + "S" + File.separator, this.treeItem_ProjectSubparts[0], DatType.SUBPART); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "parts" + File.separator + "s" + File.separator, this.treeItem_ProjectSubparts[0], DatType.SUBPART); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "PARTS" + File.separator, this.treeItem_ProjectParts[0], DatType.PART); //$NON-NLS-1$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "parts" + File.separator, this.treeItem_ProjectParts[0], DatType.PART); //$NON-NLS-1$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "P" + File.separator + "48" + File.separator, this.treeItem_ProjectPrimitives48[0], DatType.PRIMITIVE48); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "p" + File.separator + "48" + File.separator, this.treeItem_ProjectPrimitives48[0], DatType.PRIMITIVE48); //$NON-NLS-1$ //$NON-NLS-2$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "P" + File.separator, this.treeItem_ProjectPrimitives[0], DatType.PRIMITIVE); //$NON-NLS-1$
    registerPrefix(validPrefixes, categories, types, project + File.separator + "p" + File.separator, this.treeItem_ProjectPrimitives[0], DatType.PRIMITIVE); //$NON-NLS-1$
    // Longest prefixes first, so e.g. "PARTS/S/" matches before "PARTS/".
    Collections.sort(validPrefixes, new Comp());
    for (String prefix : validPrefixes) {
        TreeItem item = categories.get(prefix);
        ArrayList<DatFile> dats = (ArrayList<DatFile>) item.getData();
        ArrayList<TreeItem> datFileTreeItems = item.getItems();
        Set<TreeItem> itemsToRemove = new HashSet<TreeItem>();
        for (TreeItem df : datFileTreeItems) {
            DatFile d = (DatFile) df.getData();
            String newName = d.getNewName();
            // Find the most specific prefix the renamed file now lives under.
            String validPrefix = null;
            for (String p2 : validPrefixes) {
                if (newName.startsWith(p2)) {
                    validPrefix = p2;
                    break;
                }
            }
            if (validPrefix != null) {
                TreeItem item2 = categories.get(validPrefix);
                if (!item2.equals(item)) {
                    // The file moved to another category folder: re-home the item.
                    itemsToRemove.add(df);
                    dats.remove(d);
                    ((ArrayList<DatFile>) item2.getData()).add(d);
                    TreeItem nt = new TreeItem(item2, SWT.NONE);
                    nt.setText(df.getText());
                    d.setType(types.get(validPrefix));
                    nt.setData(d);
                }
            }
        }
        datFileTreeItems.removeAll(itemsToRemove);
    }
    this.treeParts[0].build();
    this.treeParts[0].redraw();
}

/**
 * Registers one valid category folder prefix together with its tree
 * category and DAT type. Replaces sixteen copy-pasted registration blocks.
 *
 * @param validPrefixes the ordered list of known prefixes (appended to)
 * @param categories    prefix → tree category mapping (updated)
 * @param types         prefix → {@link DatType} mapping (updated)
 * @param prefix        the absolute folder prefix to register
 * @param category      the tree item representing the category
 * @param type          the DAT type of files below this prefix
 */
private static void registerPrefix(ArrayList<String> validPrefixes, HashMap<String, TreeItem> categories,
        HashMap<String, DatType> types, String prefix, TreeItem category, DatType type) {
    validPrefixes.add(prefix);
    categories.put(prefix, category);
    types.put(prefix, type);
}
private class Comp implements Comparator<String> {
@Override
public int compare(String o1, String o2) {
if (o1.length() < o2.length()) {
return 1;
} else if (o1.length() > o2.length()) {
return -1;
} else {
return 0;
}
}
}
/**
 * Removes an item from the tree and refreshes every view that references it.
 * <br><br>
 * If it is open in a {@linkplain Composite3D}, this composite will be linked with a dummy file.
 * If it is open in a {@linkplain CompositeTab}, this composite will be closed.
 *
 * @param e the file to remove from the parts tree
 */
public void updateTree_removeEntry(DatFile e) {
    // All tree categories which may hold a reference to the file.
    ArrayList<TreeItem> categories = new ArrayList<TreeItem>();
    categories.add(this.treeItem_ProjectParts[0]);
    categories.add(this.treeItem_ProjectSubparts[0]);
    categories.add(this.treeItem_ProjectPrimitives[0]);
    categories.add(this.treeItem_ProjectPrimitives48[0]);
    categories.add(this.treeItem_UnofficialParts[0]);
    categories.add(this.treeItem_UnofficialSubparts[0]);
    categories.add(this.treeItem_UnofficialPrimitives[0]);
    categories.add(this.treeItem_UnofficialPrimitives48[0]);
    int counter = 0;
    for (TreeItem item : categories) {
        counter++;
        // Iterate over a copy so that the removal below does not disturb iteration.
        ArrayList<TreeItem> datFileTreeItems = new ArrayList<TreeItem>(item.getItems());
        for (TreeItem df : datFileTreeItems) {
            DatFile d = (DatFile) df.getData();
            if (e.equals(d)) {
                item.getItems().remove(df);
            } else {
                // Refresh the label of every remaining entry.
                StringBuilder nameSb = new StringBuilder(new File(d.getNewName()).getName());
                final String d2 = d.getDescription();
                // counter < 5 => one of the four "project" categories; prefix "(!)"
                // for files which do not live inside the project folder structure.
                if (counter < 5 && (!d.getNewName().startsWith(Project.getProjectPath()) || !d.getNewName().replace(Project.getProjectPath() + File.separator, "").contains(File.separator))) { //$NON-NLS-1$
                    nameSb.insert(0, "(!) "); //$NON-NLS-1$
                }
                if (d2 != null)
                    nameSb.append(d2);
                // Unsaved files are marked with a leading asterisk.
                if (Project.getUnsavedFiles().contains(d)) {
                    df.setText("* " + nameSb.toString()); //$NON-NLS-1$
                } else {
                    df.setText(nameSb.toString());
                }
            }
        }
    }
    this.treeItem_Unsaved[0].removeAll();
    Project.removeUnsavedFile(e);
    // Unlink the file from any 3D view which currently displays it.
    for (OpenGLRenderer renderer : renders) {
        Composite3D c3d = renderer.getC3D();
        if (c3d.getLockableDatFileReference().equals(e)) {
            c3d.unlinkData();
        }
    }
    // Close any text editor tab with this file; copy guards against
    // concurrent modification while windows close themselves.
    HashSet<EditorTextWindow> windows = new HashSet<EditorTextWindow>(Project.getOpenTextWindows());
    for (EditorTextWindow win : windows) {
        win.closeTabWithDatfile(e);
    }
    // Rebuild the "unsaved" category from scratch.
    Set<DatFile> unsaved = Project.getUnsavedFiles();
    for (DatFile df : unsaved) {
        TreeItem ti = new TreeItem(this.treeItem_Unsaved[0], SWT.NONE);
        StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
        final String d = df.getDescription();
        if (d != null)
            nameSb.append(d);
        ti.setText(nameSb.toString());
        ti.setData(df);
    }
    // Finally drop the file from the cached reference lists of all folders.
    TreeItem[] folders = new TreeItem[8];
    folders[0] = treeItem_ProjectParts[0];
    folders[1] = treeItem_ProjectPrimitives[0];
    folders[2] = treeItem_ProjectPrimitives48[0];
    folders[3] = treeItem_ProjectSubparts[0];
    folders[4] = treeItem_UnofficialParts[0];
    folders[5] = treeItem_UnofficialPrimitives[0];
    folders[6] = treeItem_UnofficialPrimitives48[0];
    folders[7] = treeItem_UnofficialSubparts[0];
    for (TreeItem folder : folders) {
        @SuppressWarnings("unchecked")
        ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
        cachedReferences.remove(e);
    }
    this.treeParts[0].build();
    this.treeParts[0].redraw();
}
// Helper functions

/** Selects {@code btn} exclusively: first unselects all of its sibling buttons. */
private void clickBtnTest(Button btn) {
    WidgetSelectionHelper.unselectAllChildButtons((ToolItem) btn.getParent());
    btn.setSelection(true);
}

/** Unselects all siblings of {@code btn} but preserves the button's own selection state. */
private void clickSingleBtn(Button btn) {
    boolean state = btn.getSelection();
    WidgetSelectionHelper.unselectAllChildButtons((ToolItem) btn.getParent());
    btn.setSelection(state);
}
// --- "Add ..." tool state flags ---

/** @return {@code true} while any "add ..." tool is active */
public boolean isAddingSomething() {
    return addingSomething;
}

/**
 * Toggles the global "adding" state and clears the current selection in
 * every open 3D view.
 */
public void setAddingSomething(boolean addingSomething) {
    this.addingSomething = addingSomething;
    for (OpenGLRenderer renderer : renders) {
        renderer.getC3D().getLockableDatFileReference().getVertexManager().clearSelection();
    }
}

public boolean isAddingVertices() {
    return addingVertices;
}

public void setAddingVertices(boolean addingVertices) {
    this.addingVertices = addingVertices;
}

public boolean isAddingLines() {
    return addingLines;
}

public void setAddingLines(boolean addingLines) {
    this.addingLines = addingLines;
}

public boolean isAddingTriangles() {
    return addingTriangles;
}

public void setAddingTriangles(boolean addingTriangles) {
    this.addingTriangles = addingTriangles;
}

public boolean isAddingQuads() {
    return addingQuads;
}

public void setAddingQuads(boolean addingQuads) {
    this.addingQuads = addingQuads;
}

public boolean isAddingCondlines() {
    return addingCondlines;
}

public void setAddingCondlines(boolean addingCondlines) {
    this.addingCondlines = addingCondlines;
}

public boolean isAddingSubfiles() {
    return addingSubfiles;
}

public void setAddingSubfiles(boolean addingSubfiles) {
    this.addingSubfiles = addingSubfiles;
}

/** Deactivates every "add ..." tool and unselects the corresponding toolbar buttons. */
public void disableAddAction() {
    addingSomething = false;
    addingVertices = false;
    addingLines = false;
    addingTriangles = false;
    addingQuads = false;
    addingCondlines = false;
    addingSubfiles = false;
    btn_AddVertex[0].setSelection(false);
    btn_AddLine[0].setSelection(false);
    btn_AddTriangle[0].setSelection(false);
    btn_AddQuad[0].setSelection(false);
    btn_AddCondline[0].setSelection(false);
    btn_AddPrimitive[0].setSelection(false);
}

// --- Parts tree category accessors ---

public TreeItem getProjectParts() {
    return treeItem_ProjectParts[0];
}

public TreeItem getProjectPrimitives() {
    return treeItem_ProjectPrimitives[0];
}

public TreeItem getProjectPrimitives48() {
    return treeItem_ProjectPrimitives48[0];
}

public TreeItem getProjectSubparts() {
    return treeItem_ProjectSubparts[0];
}

public TreeItem getUnofficialParts() {
    return treeItem_UnofficialParts[0];
}

public TreeItem getUnofficialPrimitives() {
    return treeItem_UnofficialPrimitives[0];
}

public TreeItem getUnofficialPrimitives48() {
    return treeItem_UnofficialPrimitives48[0];
}

public TreeItem getUnofficialSubparts() {
    return treeItem_UnofficialSubparts[0];
}

public TreeItem getOfficialParts() {
    return treeItem_OfficialParts[0];
}

public TreeItem getOfficialPrimitives() {
    return treeItem_OfficialPrimitives[0];
}

public TreeItem getOfficialPrimitives48() {
    return treeItem_OfficialPrimitives48[0];
}

public TreeItem getOfficialSubparts() {
    return treeItem_OfficialSubparts[0];
}

public TreeItem getUnsaved() {
    return treeItem_Unsaved[0];
}

// --- Editing mode state ---

public int getWorkingType() {
    return workingType;
}

public void setWorkingType(int workingMode) {
    this.workingType = workingMode;
}

public boolean isMovingAdjacentData() {
    return movingAdjacentData;
}

public void setMovingAdjacentData(boolean movingAdjacentData) {
    this.movingAdjacentData = movingAdjacentData;
}

public int getWorkingAction() {
    return workingAction;
}

public void setWorkingAction(int workingAction) {
    this.workingAction = workingAction;
}

public int getTransformationMode() {
    return transformationMode;
}

public boolean hasNoTransparentSelection() {
    return noTransparentSelection;
}

public void setNoTransparentSelection(boolean noTransparentSelection) {
    this.noTransparentSelection = noTransparentSelection;
}

public boolean hasBfcToggle() {
    return bfcToggle;
}

public void setBfcToggle(boolean bfcToggle) {
    this.bfcToggle = bfcToggle;
}

public GColour getLastUsedColour() {
    return lastUsedColour;
}

public void setLastUsedColour(GColour lastUsedColour) {
    this.lastUsedColour = lastUsedColour;
}
/**
 * Disposes the parsed data of all files which are neither unsaved nor open
 * in any 3D view or text editor tab, and clears the related global caches.
 */
public void cleanupClosedData() {
    // Collect every file which is still in use somewhere.
    Set<DatFile> openFiles = new HashSet<DatFile>(Project.getUnsavedFiles());
    for (OpenGLRenderer renderer : renders) {
        Composite3D c3d = renderer.getC3D();
        openFiles.add(c3d.getLockableDatFileReference());
    }
    for (EditorTextWindow w : Project.getOpenTextWindows()) {
        for (CTabItem t : w.getTabFolder().getItems()) {
            openFiles.add(((CompositeTab) t).getState().getFileNameObj());
        }
    }
    // Everything parsed but no longer in use is considered dead.
    Set<DatFile> deadFiles = new HashSet<DatFile>(Project.getParsedFiles());
    deadFiles.removeAll(openFiles);
    if (!deadFiles.isEmpty()) {
        // Invalidate the global caches before disposing the data they refer to.
        GData.CACHE_viewByProjection.clear();
        GData.parsedLines.clear();
        GData.CACHE_parsedFilesSource.clear();
    }
    for (DatFile datFile : deadFiles) {
        datFile.disposeData();
    }
    if (!deadFiles.isEmpty()) {
        // TODO Debug only System.gc();
    }
}
/** @return the current text of the part search field */
public String getSearchCriteria() {
    return txt_Search[0].getText();
}

/** Clears the part search, i.e. makes all tree entries visible again. */
public void resetSearch() {
    search(""); //$NON-NLS-1$
}
/**
 * Filters the parts tree so that only entries whose name matches
 * {@code .*word.*} stay visible. Runs asynchronously on the SWT UI thread.
 *
 * @param word the raw search term entered by the user
 */
public void search(final String word) {
    this.getShell().getDisplay().asyncExec(new Runnable() {
        @SuppressWarnings("unchecked")
        @Override
        public void run() {
            // Build a "contains" regex around the user input.
            String criteria = ".*" + word + ".*"; //$NON-NLS-1$ //$NON-NLS-2$
            TreeItem[] folders = new TreeItem[12];
            folders[0] = treeItem_OfficialParts[0];
            folders[1] = treeItem_OfficialPrimitives[0];
            folders[2] = treeItem_OfficialPrimitives48[0];
            folders[3] = treeItem_OfficialSubparts[0];
            folders[4] = treeItem_UnofficialParts[0];
            folders[5] = treeItem_UnofficialPrimitives[0];
            folders[6] = treeItem_UnofficialPrimitives48[0];
            folders[7] = treeItem_UnofficialSubparts[0];
            folders[8] = treeItem_ProjectParts[0];
            folders[9] = treeItem_ProjectPrimitives[0];
            folders[10] = treeItem_ProjectPrimitives48[0];
            folders[11] = treeItem_ProjectSubparts[0];
            // Lazily build the cached per-folder file lists on first use.
            if (folders[0].getData() == null) {
                for (TreeItem folder : folders) {
                    folder.setData(new ArrayList<DatFile>());
                    for (TreeItem part : folder.getItems()) {
                        ((ArrayList<DatFile>) folder.getData()).add((DatFile) part.getData());
                    }
                }
            }
            // Validate the regex built from user input; fall back to
            // "match everything" when the pattern is malformed.
            try {
                "42".matches(criteria); //$NON-NLS-1$
            } catch (Exception ex) {
                criteria = ".*"; //$NON-NLS-1$
            }
            for (int i = 0; i < 12; i++) {
                TreeItem folder = folders[i];
                folder.removeAll();
                for (DatFile part : (ArrayList<DatFile>) folder.getData()) {
                    StringBuilder nameSb = new StringBuilder(new File(part.getNewName()).getName());
                    // i > 7 => "project" categories; prefix "(!)" for files
                    // outside the project folder structure.
                    if (i > 7 && (!part.getNewName().startsWith(Project.getProjectPath()) || !part.getNewName().replace(Project.getProjectPath() + File.separator, "").contains(File.separator))) { //$NON-NLS-1$
                        nameSb.insert(0, "(!) "); //$NON-NLS-1$
                    }
                    final String d = part.getDescription();
                    if (d != null)
                        nameSb.append(d);
                    String name = nameSb.toString();
                    TreeItem finding = new TreeItem(folder, SWT.NONE);
                    // Save the path
                    finding.setData(part);
                    // Set the filename
                    if (Project.getUnsavedFiles().contains(part) || !part.getOldName().equals(part.getNewName())) {
                        // Insert asterisk if the file was
                        // modified
                        finding.setText("* " + name); //$NON-NLS-1$
                    } else {
                        finding.setText(name);
                    }
                    // Hide "~Moved to" entries as well as non-matching names.
                    finding.setShown(!(d != null && d.startsWith(" - ~Moved to")) && name.matches(criteria)); //$NON-NLS-1$
                }
            }
            folders[0].getParent().build();
            folders[0].getParent().redraw();
            folders[0].getParent().update();
        }
    });
}
/**
 * Closes every currently open {@linkplain Composite3D} view.
 */
public void closeAllComposite3D() {
    // Iterate over a snapshot, because closing a view mutates the render list.
    for (OpenGLRenderer renderer : new ArrayList<OpenGLRenderer>(renders)) {
        renderer.getC3D().getModifier().closeView();
    }
}
/**
 * Locates {@code df} in the parts tree.
 *
 * @param df the file to look up
 * @return a {@linkplain TreeData} carrying the tree item of the file itself
 *         (if found) plus every other item which shares its short file name
 */
public TreeData getDatFileTreeData(DatFile df) {
    TreeData result = new TreeData();
    // Search all categories, including the official library and "unsaved".
    ArrayList<TreeItem> categories = new ArrayList<TreeItem>();
    categories.add(this.treeItem_ProjectParts[0]);
    categories.add(this.treeItem_ProjectSubparts[0]);
    categories.add(this.treeItem_ProjectPrimitives[0]);
    categories.add(this.treeItem_ProjectPrimitives48[0]);
    categories.add(this.treeItem_UnofficialParts[0]);
    categories.add(this.treeItem_UnofficialSubparts[0]);
    categories.add(this.treeItem_UnofficialPrimitives[0]);
    categories.add(this.treeItem_UnofficialPrimitives48[0]);
    categories.add(this.treeItem_OfficialParts[0]);
    categories.add(this.treeItem_OfficialSubparts[0]);
    categories.add(this.treeItem_OfficialPrimitives[0]);
    categories.add(this.treeItem_OfficialPrimitives48[0]);
    categories.add(this.treeItem_Unsaved[0]);
    for (TreeItem item : categories) {
        ArrayList<TreeItem> datFileTreeItems = item.getItems();
        for (TreeItem ti : datFileTreeItems) {
            DatFile d = (DatFile) ti.getData();
            if (df.equals(d)) {
                result.setLocation(ti);
            } else if (d.getShortName().equals(df.getShortName())) {
                result.getLocationsWithSameShortFilenames().add(ti);
            }
        }
    }
    return result;
}
/**
 * Updates the background picture tab.
 * Mirrors the currently selected background picture (PNG) of the file under
 * edit into the tab's widgets, or resets and disables the widgets when no
 * picture is selected. {@code updatingPngPictureTab} suppresses widget
 * listeners while the values are being written.
 */
public void updateBgPictureTab() {
    for (OpenGLRenderer renderer : renders) {
        Composite3D c3d = renderer.getC3D();
        // Only the view showing the file under edit is relevant.
        if (c3d.getLockableDatFileReference().equals(Project.getFileToEdit())) {
            VertexManager vm = c3d.getLockableDatFileReference().getVertexManager();
            GDataPNG png = vm.getSelectedBgPicture();
            if (png == null) {
                // No picture selected: reset all widgets and disable them.
                updatingPngPictureTab = true;
                txt_PngPath[0].setText("---"); //$NON-NLS-1$
                txt_PngPath[0].setToolTipText("---"); //$NON-NLS-1$

                spn_PngX[0].setValue(BigDecimal.ZERO);
                spn_PngY[0].setValue(BigDecimal.ZERO);
                spn_PngZ[0].setValue(BigDecimal.ZERO);
                spn_PngA1[0].setValue(BigDecimal.ZERO);
                spn_PngA2[0].setValue(BigDecimal.ZERO);
                spn_PngA3[0].setValue(BigDecimal.ZERO);
                spn_PngSX[0].setValue(BigDecimal.ONE);
                spn_PngSY[0].setValue(BigDecimal.ONE);

                txt_PngPath[0].setEnabled(false);
                btn_PngFocus[0].setEnabled(false);
                btn_PngImage[0].setEnabled(false);
                spn_PngX[0].setEnabled(false);
                spn_PngY[0].setEnabled(false);
                spn_PngZ[0].setEnabled(false);
                spn_PngA1[0].setEnabled(false);
                spn_PngA2[0].setEnabled(false);
                spn_PngA3[0].setEnabled(false);
                spn_PngSX[0].setEnabled(false);
                spn_PngSY[0].setEnabled(false);

                spn_PngA1[0].getParent().update();
                updatingPngPictureTab = false;
                return;
            }
            // Picture selected: enable the widgets and copy its properties.
            updatingPngPictureTab = true;

            txt_PngPath[0].setEnabled(true);
            btn_PngFocus[0].setEnabled(true);
            btn_PngImage[0].setEnabled(true);
            spn_PngX[0].setEnabled(true);
            spn_PngY[0].setEnabled(true);
            spn_PngZ[0].setEnabled(true);
            spn_PngA1[0].setEnabled(true);
            spn_PngA2[0].setEnabled(true);
            spn_PngA3[0].setEnabled(true);
            spn_PngSX[0].setEnabled(true);
            spn_PngSY[0].setEnabled(true);

            txt_PngPath[0].setText(png.texturePath);
            txt_PngPath[0].setToolTipText(png.texturePath);

            spn_PngX[0].setValue(png.offset.X);
            spn_PngY[0].setValue(png.offset.Y);
            spn_PngZ[0].setValue(png.offset.Z);
            spn_PngA1[0].setValue(png.angleA);
            spn_PngA2[0].setValue(png.angleB);
            spn_PngA3[0].setValue(png.angleC);
            spn_PngSX[0].setValue(png.scale.X);
            spn_PngSY[0].setValue(png.scale.Y);

            spn_PngA1[0].getParent().update();
            updatingPngPictureTab = false;
            return;
        }
    }
}
/** Leaves the "add subfile/primitive" mode and resets the related toolbar state. */
public void unselectAddSubfile() {
    resetAddState();
    btn_AddPrimitive[0].setSelection(false);
    setAddingSubfiles(false);
    setAddingSomething(false);
}
/**
 * Shows a "save" dialog and creates a new *.dat file in the project.
 * Loops until a non-allocated file name was chosen or the user cancelled.
 *
 * @param sh parent shell for the file dialog
 * @param where which editor(s) the new file should be opened in
 * @return the created file, or {@code null} if the user cancelled
 */
public DatFile createNewDatFile(Shell sh, OpenInWhat where) {
    FileDialog fd = new FileDialog(sh, SWT.SAVE);
    fd.setText("Create a new *.dat file"); //$NON-NLS-1$ I18N Needs translation!
    // For the default ("project") path, derive the folder from the install location.
    if ("project".equals(Project.getProjectPath())) { //$NON-NLS-1$
        try {
            String path = LDPartEditor.class.getProtectionDomain().getCodeSource().getLocation().getPath();
            String decodedPath = URLDecoder.decode(path, "UTF-8"); //$NON-NLS-1$
            // Strip the trailing 4 characters (presumably a file suffix) — TODO confirm.
            decodedPath = decodedPath.substring(0, decodedPath.length() - 4);
            fd.setFilterPath(decodedPath + "project"); //$NON-NLS-1$
        } catch (Exception consumed) {
            // Best effort: fall back to the plain project path.
            fd.setFilterPath(Project.getProjectPath());
        }
    } else {
        fd.setFilterPath(Project.getProjectPath());
    }

    String[] filterExt = { "*.dat", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$
    fd.setFilterExtensions(filterExt);
    String[] filterNames = { "LDraw Source File (*.dat)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ I18N Needs translation!
    fd.setFilterNames(filterNames);

    while (true) {
        String selected = fd.open();
        System.out.println(selected); // NOTE(review): debug output — consider removing
        if (selected != null) {
            // Check if its already created
            DatFile df = new DatFile(selected);
            if (isFileNameAllocated(selected, df, true)) {
                // Name collision: let the user retry or cancel.
                MessageBox messageBox = new MessageBox(getShell(), SWT.ICON_ERROR | SWT.RETRY | SWT.CANCEL);
                messageBox.setText(I18n.DIALOG_AlreadyAllocatedNameTitle);
                messageBox.setMessage(I18n.DIALOG_AlreadyAllocatedName);
                int result = messageBox.open();
                if (result == SWT.CANCEL) {
                    break;
                }
            } else {
                // Register the new file in the "project parts" category.
                TreeItem ti = new TreeItem(this.treeItem_ProjectParts[0], SWT.NONE);
                StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
                nameSb.append("(new file)"); //$NON-NLS-1$ I18N
                ti.setText(nameSb.toString());
                ti.setData(df);
                @SuppressWarnings("unchecked")
                ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
                cachedReferences.add(df);
                Project.addUnsavedFile(df);
                updateTree_renamedEntries();
                updateTree_unsavedEntries();
                openDatFile(df, where, null);
                return df;
            }
        } else {
            break;
        }
    }
    return null;
}
/**
 * Shows an "open" dialog, parses type and description of the chosen *.dat
 * file, registers it in the matching project category and opens it.
 *
 * @param sh parent shell for the file dialog
 * @param where which editor(s) the file should be opened in
 * @return the opened file, or {@code null} if the user cancelled
 */
public DatFile openDatFile(Shell sh, OpenInWhat where) {
    FileDialog fd = new FileDialog(sh, SWT.OPEN);
    fd.setText("Open *.dat file"); //$NON-NLS-1$ I18N Needs translation!
    // For the default ("project") path, derive the folder from the install location.
    if ("project".equals(Project.getProjectPath())) { //$NON-NLS-1$
        try {
            String path = LDPartEditor.class.getProtectionDomain().getCodeSource().getLocation().getPath();
            String decodedPath = URLDecoder.decode(path, "UTF-8"); //$NON-NLS-1$
            decodedPath = decodedPath.substring(0, decodedPath.length() - 4);
            fd.setFilterPath(decodedPath + "project"); //$NON-NLS-1$
        } catch (Exception consumed) {
            // Best effort: fall back to the plain project path.
            fd.setFilterPath(Project.getProjectPath());
        }
    } else {
        fd.setFilterPath(Project.getProjectPath());
    }

    String[] filterExt = { "*.dat", "*.*" }; //$NON-NLS-1$ //$NON-NLS-2$
    fd.setFilterExtensions(filterExt);
    String[] filterNames = { "LDraw Source File (*.dat)", "All Files" }; //$NON-NLS-1$ //$NON-NLS-2$ I18N Needs translation!
    fd.setFilterNames(filterNames);

    String selected = fd.open();
    System.out.println(selected); // NOTE(review): debug output — consider removing
    if (selected != null) {
        // Check if its already created
        DatType type = DatType.PART;
        DatFile df = new DatFile(selected);
        DatFile original = isFileNameAllocated2(selected, df);

        if (original == null) {
            // Type Check and Description Parsing!!
            // Read the header lines: the first line yields the description,
            // the following "0 ..." lines may carry a !LDRAW_ORG type tag.
            StringBuilder titleSb = new StringBuilder();
            UTF8BufferedReader reader = null;
            File f = new File(selected);
            try {
                reader = new UTF8BufferedReader(f.getAbsolutePath());
                String title = reader.readLine();
                if (title != null) {
                    title = title.trim();
                    if (title.length() > 0) {
                        // Drop the leading "0" meta marker from the title line.
                        titleSb.append(" -"); //$NON-NLS-1$
                        titleSb.append(title.substring(1));
                    }
                }
                while (true) {
                    String typ = reader.readLine();
                    if (typ != null) {
                        typ = typ.trim();
                        if (!typ.startsWith("0")) { //$NON-NLS-1$
                            break;
                        } else {
                            int i1 = typ.indexOf("!LDRAW_ORG"); //$NON-NLS-1$
                            if (i1 > -1) {
                                int i2;
                                // Most specific keywords are checked first.
                                i2 = typ.indexOf("Subpart"); //$NON-NLS-1$
                                if (i2 > -1 && i1 < i2) {
                                    type = DatType.SUBPART;
                                    break;
                                }
                                i2 = typ.indexOf("Part"); //$NON-NLS-1$
                                if (i2 > -1 && i1 < i2) {
                                    type = DatType.PART;
                                    break;
                                }
                                i2 = typ.indexOf("48_Primitive"); //$NON-NLS-1$
                                if (i2 > -1 && i1 < i2) {
                                    type = DatType.PRIMITIVE48;
                                    break;
                                }
                                i2 = typ.indexOf("Primitive"); //$NON-NLS-1$
                                if (i2 > -1 && i1 < i2) {
                                    type = DatType.PRIMITIVE;
                                    break;
                                }
                            }
                        }
                    } else {
                        break;
                    }
                }
            } catch (LDParsingException e) {
                // NOTE(review): exception silently ignored — the file is opened
                // with the defaults parsed so far; confirm this is intended.
            } catch (FileNotFoundException e) {
                // NOTE(review): silently ignored, see above.
            } catch (UnsupportedEncodingException e) {
                // NOTE(review): silently ignored, see above.
            } finally {
                try {
                    if (reader != null)
                        reader.close();
                } catch (LDParsingException e1) {
                    // Best effort close.
                }
            }
            df = new DatFile(selected, titleSb.toString(), false, type);
            df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
        } else {
            // The file is already known: reuse the existing instance unless it
            // is merely referenced (then it gets re-registered as project file).
            df.setProjectFile(df.getNewName().startsWith(Project.getProjectPath()));
            if (original.isProjectFile()) {
                openDatFile(df, where, null);
                return df;
            }
            {
                @SuppressWarnings("unchecked")
                ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
                if (cachedReferences.contains(df)) {
                    openDatFile(df, where, null);
                    return df;
                }
            }
            {
                @SuppressWarnings("unchecked")
                ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectSubparts[0].getData();
                if (cachedReferences.contains(df)) {
                    openDatFile(df, where, null);
                    return df;
                }
            }
            {
                @SuppressWarnings("unchecked")
                ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives[0].getData();
                if (cachedReferences.contains(df)) {
                    openDatFile(df, where, null);
                    return df;
                }
            }
            {
                @SuppressWarnings("unchecked")
                ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives48[0].getData();
                if (cachedReferences.contains(df)) {
                    openDatFile(df, where, null);
                    return df;
                }
            }
            type = original.getType();
            df = original;
        }

        // Register the file in the tree category matching its type.
        TreeItem ti;
        switch (type) {
        case PART:
        {
            @SuppressWarnings("unchecked")
            ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
            cachedReferences.add(df);
        }
            ti = new TreeItem(this.treeItem_ProjectParts[0], SWT.NONE);
            break;
        case SUBPART:
        {
            @SuppressWarnings("unchecked")
            ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectSubparts[0].getData();
            cachedReferences.add(df);
        }
            ti = new TreeItem(this.treeItem_ProjectSubparts[0], SWT.NONE);
            break;
        case PRIMITIVE:
        {
            @SuppressWarnings("unchecked")
            ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives[0].getData();
            cachedReferences.add(df);
        }
            ti = new TreeItem(this.treeItem_ProjectPrimitives[0], SWT.NONE);
            break;
        case PRIMITIVE48:
        {
            @SuppressWarnings("unchecked")
            ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectPrimitives48[0].getData();
            cachedReferences.add(df);
        }
            ti = new TreeItem(this.treeItem_ProjectPrimitives48[0], SWT.NONE);
            break;
        default:
        {
            @SuppressWarnings("unchecked")
            ArrayList<DatFile> cachedReferences = (ArrayList<DatFile>) this.treeItem_ProjectParts[0].getData();
            cachedReferences.add(df);
        }
            ti = new TreeItem(this.treeItem_ProjectParts[0], SWT.NONE);
            break;
        }

        StringBuilder nameSb = new StringBuilder(new File(df.getNewName()).getName());
        nameSb.append("(new file)"); //$NON-NLS-1$ I18N
        ti.setText(nameSb.toString());
        ti.setData(df);
        updateTree_unsavedEntries();
        openDatFile(df, where, null);
        return df;
    }
    return null;
}
/**
 * Opens {@code df} in the 3D editor and/or the text editor.
 *
 * @param df the file to open
 * @param where which editor(s) to open the file in
 * @param tWin the text window from which the call originates, or {@code null}
 * @return {@code true} only when the file was already open in a tab of
 *         {@code tWin} itself
 */
public boolean openDatFile(DatFile df, OpenInWhat where, EditorTextWindow tWin) {
    if (where == OpenInWhat.EDITOR_3D || where == OpenInWhat.EDITOR_TEXT_AND_3D) {
        if (renders.isEmpty()) {
            // No 3D view yet: replace the "%EMPTY%" placeholder with a new one.
            if ("%EMPTY%".equals(Editor3DWindow.getSashForm().getChildren()[1].getData())) { //$NON-NLS-1$
                int[] mainSashWeights = Editor3DWindow.getSashForm().getWeights();
                Editor3DWindow.getSashForm().getChildren()[1].dispose();
                CompositeContainer cmp_Container = new CompositeContainer(Editor3DWindow.getSashForm(), false);
                cmp_Container.moveBelow(Editor3DWindow.getSashForm().getChildren()[0]);
                df.parseForData();
                final VertexManager vm = df.getVertexManager();
                Project.setFileToEdit(df);
                cmp_Container.getComposite3D().setLockableDatFileReference(df);
                vm.zoomToFit(cmp_Container.getComposite3D());
                Editor3DWindow.getSashForm().getParent().layout();
                Editor3DWindow.getSashForm().setWeights(mainSashWeights);
            }
        } else {
            // Show the file in every 3D view which is not locked to another file.
            boolean canUpdate = false;
            for (OpenGLRenderer renderer : renders) {
                Composite3D c3d = renderer.getC3D();
                if (!c3d.isDatFileLockedOnDisplay()) {
                    canUpdate = true;
                    break;
                }
            }
            if (canUpdate) {
                final VertexManager vm = df.getVertexManager();
                if (vm.isModified()) {
                    // Flush pending modifications before reparsing.
                    df.setText(df.getText());
                }
                df.parseForData();
                Project.setFileToEdit(df);
                for (OpenGLRenderer renderer : renders) {
                    Composite3D c3d = renderer.getC3D();
                    if (!c3d.isDatFileLockedOnDisplay()) {
                        c3d.setLockableDatFileReference(df);
                        vm.zoomToFit(c3d);
                    }
                }
            }
        }
    }
    if (where == OpenInWhat.EDITOR_TEXT || where == OpenInWhat.EDITOR_TEXT_AND_3D) {
        // Focus an existing tab when the file is already open somewhere.
        for (EditorTextWindow w : Project.getOpenTextWindows()) {
            for (CTabItem t : w.getTabFolder().getItems()) {
                if (df.equals(((CompositeTab) t).getState().getFileNameObj())) {
                    w.getTabFolder().setSelection(t);
                    ((CompositeTab) t).getControl().getShell().forceActive();
                    w.open();
                    return w == tWin;
                }
            }
        }
        if (tWin == null) {
            // Project.getParsedFiles().add(df); IS NECESSARY HERE
            Project.getParsedFiles().add(df);
            new EditorTextWindow().run(df);
        }
    }
    return false;
}
/**
 * Clears and disables all widgets of the selection tab and restores the
 * default coordinate labels. {@code updatingSelectionTab} suppresses widget
 * listeners while the values are reset.
 */
public void disableSelectionTab() {
    updatingSelectionTab = true;
    txt_Line[0].setText(""); //$NON-NLS-1$
    spn_SelectionX1[0].setEnabled(false);
    spn_SelectionY1[0].setEnabled(false);
    spn_SelectionZ1[0].setEnabled(false);
    spn_SelectionX2[0].setEnabled(false);
    spn_SelectionY2[0].setEnabled(false);
    spn_SelectionZ2[0].setEnabled(false);
    spn_SelectionX3[0].setEnabled(false);
    spn_SelectionY3[0].setEnabled(false);
    spn_SelectionZ3[0].setEnabled(false);
    spn_SelectionX4[0].setEnabled(false);
    spn_SelectionY4[0].setEnabled(false);
    spn_SelectionZ4[0].setEnabled(false);
    spn_SelectionX1[0].setValue(BigDecimal.ZERO);
    spn_SelectionY1[0].setValue(BigDecimal.ZERO);
    spn_SelectionZ1[0].setValue(BigDecimal.ZERO);
    spn_SelectionX2[0].setValue(BigDecimal.ZERO);
    spn_SelectionY2[0].setValue(BigDecimal.ZERO);
    spn_SelectionZ2[0].setValue(BigDecimal.ZERO);
    spn_SelectionX3[0].setValue(BigDecimal.ZERO);
    spn_SelectionY3[0].setValue(BigDecimal.ZERO);
    spn_SelectionZ3[0].setValue(BigDecimal.ZERO);
    spn_SelectionX4[0].setValue(BigDecimal.ZERO);
    spn_SelectionY4[0].setValue(BigDecimal.ZERO);
    spn_SelectionZ4[0].setValue(BigDecimal.ZERO);
    lbl_SelectionX1[0].setText(I18n.EDITOR3D_PositionX1);
    lbl_SelectionY1[0].setText(I18n.EDITOR3D_PositionY1);
    lbl_SelectionZ1[0].setText(I18n.EDITOR3D_PositionZ1);
    lbl_SelectionX2[0].setText(I18n.EDITOR3D_PositionX2);
    lbl_SelectionY2[0].setText(I18n.EDITOR3D_PositionY2);
    lbl_SelectionZ2[0].setText(I18n.EDITOR3D_PositionZ2);
    lbl_SelectionX3[0].setText(I18n.EDITOR3D_PositionX3);
    lbl_SelectionY3[0].setText(I18n.EDITOR3D_PositionY3);
    lbl_SelectionZ3[0].setText(I18n.EDITOR3D_PositionZ3);
    lbl_SelectionX4[0].setText(I18n.EDITOR3D_PositionX4);
    lbl_SelectionY4[0].setText(I18n.EDITOR3D_PositionY4);
    lbl_SelectionZ4[0].setText(I18n.EDITOR3D_PositionZ4);
    updatingSelectionTab = false;
}
/** @return the list of all active OpenGL renderers (one per 3D view) */
public static ArrayList<OpenGLRenderer> getRenders() {
    return renders;
}

public SearchWindow getSearchWindow() {
    return searchWindow;
}

public void setSearchWindow(SearchWindow searchWindow) {
    this.searchWindow = searchWindow;
}
/** Copies the state of the selection filter menu items into the selector settings. */
private void loadSelectorSettings() {
    sels.setColour(mntm_WithSameColour[0].getSelection());
    sels.setEdgeStop(mntm_StopAtEdges[0].getSelection());
    sels.setHidden(mntm_WithHiddenData[0].getSelection());
    sels.setNoSubfiles(mntm_ExceptSubfiles[0].getSelection());
    sels.setOrientation(mntm_WithSameOrientation[0].getSelection());
    sels.setDistance(mntm_WithAccuracy[0].getSelection());
    sels.setWholeSubfiles(mntm_WithWholeSubfiles[0].getSelection());
    sels.setVertices(mntm_SVertices[0].getSelection());
    sels.setLines(mntm_SLines[0].getSelection());
    sels.setTriangles(mntm_STriangles[0].getSelection());
    sels.setQuads(mntm_SQuads[0].getSelection());
    sels.setCondlines(mntm_SCLines[0].getSelection());
}
/**
 * Checks whether the path {@code dir} is already taken by a file in any
 * library category.
 *
 * @param dir the absolute file path to test
 * @param df the file the path would belong to (ignored unless {@code createNew})
 * @param createNew when {@code true}, a match with {@code df} itself also counts
 * @return {@code true} if the name is already allocated
 */
private boolean isFileNameAllocated(String dir, DatFile df, boolean createNew) {
    TreeItem[] folders = new TreeItem[12];
    folders[0] = treeItem_OfficialParts[0];
    folders[1] = treeItem_OfficialPrimitives[0];
    folders[2] = treeItem_OfficialPrimitives48[0];
    folders[3] = treeItem_OfficialSubparts[0];
    folders[4] = treeItem_UnofficialParts[0];
    folders[5] = treeItem_UnofficialPrimitives[0];
    folders[6] = treeItem_UnofficialPrimitives48[0];
    folders[7] = treeItem_UnofficialSubparts[0];
    folders[8] = treeItem_ProjectParts[0];
    folders[9] = treeItem_ProjectPrimitives[0];
    folders[10] = treeItem_ProjectPrimitives48[0];
    folders[11] = treeItem_ProjectSubparts[0];

    for (TreeItem folder : folders) {
        @SuppressWarnings("unchecked")
        ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
        for (DatFile d : cachedReferences) {
            if (createNew || !df.equals(d)) {
                // Both the on-disk ("old") and pending ("new") names count.
                if (dir.equals(d.getOldName()) || dir.equals(d.getNewName())) {
                    return true;
                }
            }
        }
    }
    return false;
}
/**
 * Looks up the file which already occupies the path {@code dir}.
 *
 * @param dir the absolute file path to test
 * @param df unused here; kept for symmetry with {@link #isFileNameAllocated}
 * @return the already registered file with that old or new name, or {@code null}
 */
private DatFile isFileNameAllocated2(String dir, DatFile df) {
    TreeItem[] folders = new TreeItem[12];
    folders[0] = treeItem_OfficialParts[0];
    folders[1] = treeItem_OfficialPrimitives[0];
    folders[2] = treeItem_OfficialPrimitives48[0];
    folders[3] = treeItem_OfficialSubparts[0];
    folders[4] = treeItem_UnofficialParts[0];
    folders[5] = treeItem_UnofficialPrimitives[0];
    folders[6] = treeItem_UnofficialPrimitives48[0];
    folders[7] = treeItem_UnofficialSubparts[0];
    folders[8] = treeItem_ProjectParts[0];
    folders[9] = treeItem_ProjectPrimitives[0];
    folders[10] = treeItem_ProjectPrimitives48[0];
    folders[11] = treeItem_ProjectSubparts[0];

    for (TreeItem folder : folders) {
        @SuppressWarnings("unchecked")
        ArrayList<DatFile> cachedReferences =(ArrayList<DatFile>) folder.getData();
        for (DatFile d : cachedReferences) {
            // Both the on-disk ("old") and pending ("new") names count.
            if (dir.equals(d.getOldName()) || dir.equals(d.getNewName())) {
                return d;
            }
        }
    }
    return null;
}
}
| Fixed a bug from commit e5b35e52cf | src/org/nschmidt/ldparteditor/shells/editor3d/Editor3DWindow.java | Fixed a bug from commit e5b35e52cf |
|
Java | epl-1.0 | b43f001300a4ea7b47649c8b07fa0f522cc0902b | 0 | NicholasII/sdas,NicholasII/sdas | package com.iscas.sdas.controller.data;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import com.iscas.sdas.dto.TableInfoDto;
import com.iscas.sdas.dto.work.AllCapacityWorkDto;
import com.iscas.sdas.dto.work.AllOutServerDto;
import com.iscas.sdas.service.CommonService;
import com.iscas.sdas.service.WorkService;
import com.iscas.sdas.service.work.OutServerService;
import com.iscas.sdas.util.CommonUntils;
import com.iscas.sdas.util.Constraints;
import com.iscas.sdas.util.FileImport;
import tasks.realtime.CellUploadFileOfExpertTask;
import tasks.realtime.CellUploadFileTask;
/**
 * Controller for the data import pages. Serves the online/offline views and
 * handles uploads of offline data files; the {@code type} request parameter
 * selects the import pipeline (network, capacity, fault, complaint,
 * out-of-service).
 */
@Controller
@RequestMapping("/data")
public class DataController {

    @Autowired
    WorkService workService;
    @Autowired
    CommonService commonService;
    @Autowired
    OutServerService outServerService;

    /** Shows the online data page. */
    @RequestMapping("/online")
    public ModelAndView online(){
        return new ModelAndView("/data/online");
    }

    /** Shows the offline upload page with the result state set to "unknown". */
    @RequestMapping("/offline")
    public ModelAndView offline(){
        ModelAndView modelAndView = new ModelAndView("/data/offline");
        modelAndView.addObject("success", Constraints.RESULT_UNKNOWN);
        return modelAndView;
    }

    /**
     * Debug endpoint which imports a hard-coded test file.
     * NOTE(review): development helper — consider removing before release.
     */
    @RequestMapping("/test")
    public ModelAndView test(HttpServletRequest request) {
        ModelAndView modelAndView = new ModelAndView("data/offline");
        String path = "/home/hadoop/systempdata/test_rtdata_net.csv";
        System.err.println(path);
        CellUploadFileTask.doUploadFileWork(path);
        System.err.println("task over!");
        return modelAndView;
    }

    /**
     * 文件上传 — handles an offline data file upload.
     * <p>
     * The {@code type} parameter selects the pipeline: {@code network} feeds the
     * expert upload task, {@code capacity} imports into
     * {@code t_performance_work}, {@code outservice} imports into
     * {@code t_wireless_retirement}; {@code fault} and {@code complaint} are not
     * implemented yet. The view model attribute {@code success} carries the
     * outcome back to the page.
     *
     * @param request the multipart upload request
     * @return the offline view with the import result
     */
    @RequestMapping("/upload")
    public ModelAndView upload(HttpServletRequest request) {
        ModelAndView modelAndView = new ModelAndView("data/offline");
        String type = request.getParameter("type");
        if ("network".equals(type)) {
            String time = request.getParameter("time");
            List<String> paths = null;
            try {
                paths = CommonUntils.MultipleFilesUpload(request);
            } catch (Exception e1) {
                e1.printStackTrace();
                modelAndView.addObject("success", Constraints.RESULT_FAIL+ ":上传失败!");
            }
            if (paths!=null && paths.size() > 0) {
                try {
                    // Hand the first uploaded file plus the timestamp to the
                    // background import task.
                    String[] args = new String[2];
                    args[0] = paths.get(0);
                    args[1] = time;
                    new CellUploadFileOfExpertTask().runTask(args);
                    modelAndView.addObject("success", Constraints.RESULT_SUCCESS);
                } catch (Exception e) {
                    e.printStackTrace();
                    modelAndView.addObject("success", Constraints.RESULT_FAIL+ ":调用后台方法失败!");
                }
            }
        } else if ("capacity".equals(type)) {
            String tablename = "t_performance_work";
            List<TableInfoDto> tableInfoDtos = commonService.tableindex(tablename);
            List<AllCapacityWorkDto> performanceWorkDtos = new ArrayList<>();
            List<String> paths = null;
            try {
                paths = CommonUntils.MultipleFilesUpload(request);
            } catch (Exception e1) {
                e1.printStackTrace();
                modelAndView.addObject("success", Constraints.RESULT_FAIL+ ":上传失败!");
            }
            if (paths != null && paths.size() > 0) {
                if (tableInfoDtos != null && tableInfoDtos.size() > 0) {
                    // One DTO per data row of the uploaded sheet.
                    int rows = FileImport.tablerows(paths.get(0));
                    for (int i = 0; i < rows; i++) {
                        AllCapacityWorkDto workDto = new AllCapacityWorkDto();
                        performanceWorkDtos.add(workDto);
                    }
                    try {
                        FileImport.importwork(paths.get(0), performanceWorkDtos, tableInfoDtos);// 将excel映射为对象
                        workService.clearPerformanceWork(); // 清空表
                        workService.insertPerformanceWork(performanceWorkDtos);// 插入表并将questionflag置为-1
                        modelAndView.addObject("success", Constraints.RESULT_SUCCESS);
                    } catch (Exception e) {
                        e.printStackTrace();
                        modelAndView.addObject("success", Constraints.RESULT_FAIL + ":文件损坏!");
                    }
                }
            }
        } else if ("fault".equals(type)) {
            // TODO: fault data import is not implemented yet.
        } else if ("complaint".equals(type)) {
            // TODO: complaint data import is not implemented yet.
        } else if ("outservice".equals(type)) {
            String tablename = "t_wireless_retirement";
            List<TableInfoDto> tableInfoDtos = commonService.tableindex(tablename);
            List<AllOutServerDto> osWorkDtos = new ArrayList<>();
            List<String> paths = null;
            try {
                // Consistent with the other branches: report upload failures
                // instead of letting the exception escape the controller.
                paths = CommonUntils.MultipleFilesUpload(request);
            } catch (Exception e1) {
                e1.printStackTrace();
                modelAndView.addObject("success", Constraints.RESULT_FAIL + ":上传失败!");
            }
            if (paths != null && paths.size() > 0) {
                if (tableInfoDtos != null && tableInfoDtos.size() > 0) {
                    // One DTO per data row of the uploaded sheet.
                    int rows = FileImport.tablerows(paths.get(0));
                    for (int i = 0; i < rows; i++) {
                        AllOutServerDto workDto = new AllOutServerDto();
                        osWorkDtos.add(workDto);
                    }
                    try {
                        FileImport.importwork(paths.get(0), osWorkDtos, tableInfoDtos);// 将excel映射为对象
                        outServerService.clearOSWork(); // 清空表
                        outServerService.insertOSWork(osWorkDtos);
                        modelAndView.addObject("success", Constraints.RESULT_SUCCESS);
                    } catch (Exception e) {
                        e.printStackTrace();
                        modelAndView.addObject("success", Constraints.RESULT_FAIL);
                    }
                }
            }
        }
        return modelAndView;
    }
}
| src/main/java/com/iscas/sdas/controller/data/DataController.java | package com.iscas.sdas.controller.data;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;
import com.iscas.sdas.common.PageDto;
import com.iscas.sdas.dto.TableInfoDto;
import com.iscas.sdas.dto.work.AllCapacityWorkDto;
import com.iscas.sdas.dto.work.AllOutServerDto;
import com.iscas.sdas.service.CommonService;
import com.iscas.sdas.service.WorkService;
import com.iscas.sdas.service.work.OutServerService;
import com.iscas.sdas.util.CommonUntils;
import com.iscas.sdas.util.Constraints;
import com.iscas.sdas.util.FileImport;
import tasks.realtime.CellUploadFileOfExpertTask;
import tasks.realtime.CellUploadFileTask;
@Controller
@RequestMapping("/data")
public class DataController {
@Autowired
WorkService workService;
@Autowired
CommonService commonService;
@Autowired
OutServerService outServerService;
@RequestMapping("/online")
public ModelAndView online(){
return new ModelAndView("/data/online");
}
@RequestMapping("/offline")
public ModelAndView offline(){
ModelAndView modelAndView = new ModelAndView("/data/offline");
modelAndView.addObject("success", Constraints.RESULT_UNKNOWN);
return modelAndView;
}
@RequestMapping("/test")
public ModelAndView test(HttpServletRequest request) {
ModelAndView modelAndView = new ModelAndView("data/offline");
String path = "/home/hadoop/systempdata/test_rtdata_net.csv";
System.err.println(path);
CellUploadFileTask.doUploadFileWork(path);
System.err.println("task over!");
return modelAndView;
}
/**
* 文件上传
* @param request
* @return
*/
@RequestMapping("/upload")
public ModelAndView upload(HttpServletRequest request) {
ModelAndView modelAndView = new ModelAndView("data/offline");
String type = request.getParameter("type");
if ("network".equals(type)) {
String time = request.getParameter("time");
List<String> paths = null;
try {
paths = CommonUntils.MultipleFilesUpload(request);
} catch (Exception e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
modelAndView.addObject("success", Constraints.RESULT_FAIL+ ":上传失败!");
}
if (paths!=null && paths.size() > 0) {
try {
String[] args = new String[2];
args[0] = paths.get(0);
args[1] = time;
new CellUploadFileOfExpertTask().runTask(args);
//CellUploadFileTask.doUploadFileWork(paths.get(0));
//CellUploadFileOfExpertTask.doUploadFileWork(paths.get(0));
modelAndView.addObject("success", Constraints.RESULT_SUCCESS);
} catch (Exception e) {
e.printStackTrace();
modelAndView.addObject("success", Constraints.RESULT_FAIL+ ":调用后台方法失败!");
}
}
} else if ("capacity".equals(type)) {
String tablename = "t_performance_work";
List<TableInfoDto> tableInfoDtos = commonService.tableindex(tablename);
List<AllCapacityWorkDto> performanceWorkDtos = new ArrayList<>();
List<String> paths = null;
try {
paths = CommonUntils.MultipleFilesUpload(request);
} catch (Exception e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
modelAndView.addObject("success", Constraints.RESULT_FAIL+ ":上传失败!");
}
if (paths != null && paths.size() > 0) {
if (tableInfoDtos != null && tableInfoDtos.size() > 0) {
int rows = FileImport.tablerows(paths.get(0));
for (int i = 0; i < rows; i++) {
AllCapacityWorkDto workDto = new AllCapacityWorkDto();
performanceWorkDtos.add(workDto);
}
try {
FileImport.importwork(paths.get(0), performanceWorkDtos, tableInfoDtos);// 将excel映射为对象
workService.clearPerformanceWork(); // 清空表
workService.insertPerformanceWork(performanceWorkDtos);// 插入表并将questionflag置为-1
modelAndView.addObject("success", Constraints.RESULT_SUCCESS);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
modelAndView.addObject("success", Constraints.RESULT_FAIL + ":文件损坏!");
}
}
}
} else if ("fault".equals(type)) {
} else if ("complaint".equals(type)) {
} else if ("outservice".equals(type)) {
String tablename = "t_wireless_retirement";
List<TableInfoDto> tableInfoDtos = commonService.tableindex(tablename);
List<AllOutServerDto> osWorkDtos = new ArrayList<>();
List<String> paths = CommonUntils.MultipleFilesUpload(request);
if (paths != null && paths.size() > 0) {
if (tableInfoDtos != null && tableInfoDtos.size() > 0) {
int rows = FileImport.tablerows(paths.get(0));
for (int i = 0; i < rows; i++) {
AllOutServerDto workDto = new AllOutServerDto();
osWorkDtos.add(workDto);
}
try {
FileImport.importwork(paths.get(0), osWorkDtos, tableInfoDtos);// 将excel映射为对象
outServerService.clearOSWork(); // 清空表
outServerService.insertOSWork(osWorkDtos);
modelAndView.addObject("success", Constraints.RESULT_SUCCESS);
} catch (Exception e) {
e.printStackTrace();
modelAndView.addObject("success", Constraints.RESULT_FAIL);
}
}
}
}
return modelAndView;
}
}
| commit
| src/main/java/com/iscas/sdas/controller/data/DataController.java | commit |
|
Java | agpl-3.0 | 591588a4cab7c5e288e540f1116a3a17609eae25 | 0 | RapidInfoSys/Rapid,RapidInfoSys/Rapid,RapidInfoSys/Rapid,RapidInfoSys/Rapid | /*
Copyright (C) 2015 - Gareth Edwards / Rapid Information Systems
[email protected]
This file is part of the Rapid Application Platform
RapidSOA is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version. The terms require you
to include the original copyright, and the license notice in all redistributions.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
in a file named "COPYING". If not, see <http://www.gnu.org/licenses/>.
*/
package com.rapid.forms;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpSession;
import org.json.JSONObject;
import com.rapid.core.Application;
import com.rapid.core.Control;
import com.rapid.core.Page;
import com.rapid.security.SecurityAdapter;
import com.rapid.security.SecurityAdapter.SecurityAdapaterException;
import com.rapid.server.Rapid;
import com.rapid.server.RapidRequest;
public class RapidFormAdapter extends FormAdapter {
// constructor
public RapidFormAdapter(ServletContext servletContext, Application application) {
super(servletContext, application);
}
// class methods
protected Map<String,FormPageControlValues> getUserFormPageControlValues(RapidRequest rapidRequest) {
// get the user session
HttpSession session = rapidRequest.getRequest().getSession();
// get all app page control values from session
Map<String,Map<String,FormPageControlValues>> userAppPageControlValues = (Map<String, Map<String, FormPageControlValues>>) session.getAttribute("userFormPageControlValues");
// if null
if (userAppPageControlValues == null) {
// instantiate
userAppPageControlValues = new HashMap<String, Map<String, FormPageControlValues>>();
// add to session
session.setAttribute("userFormPageControlValues", userAppPageControlValues);
}
// get the form id
String formId = getFormId(rapidRequest);
// the page controls for specified app
Map<String,FormPageControlValues> userPageControlValues = userAppPageControlValues.get(formId);
// if null, instantiate
if (userPageControlValues == null) {
// instantiate
userPageControlValues = new HashMap<String, FormPageControlValues>();
// add to user app pages
userAppPageControlValues.put(formId, userPageControlValues);
}
// return!
return userPageControlValues;
}
// overridden methods
@Override
public String getFormId(RapidRequest rapidRequest) {
// get the user session (making a new one if need be)
HttpSession session = rapidRequest.getRequest().getSession();
// retrieve the form ids from the session
Map<String,String> formIds = (Map<String, String>) session.getAttribute("userFormIds");
// instantiate if null
if (formIds == null) formIds = new HashMap<String, String>();
// get the application
Application application = rapidRequest.getApplication();
// get the form id based on the app id and version
String formId = formIds.get(application.getId() + "-" + application.getVersion());
// if it's null
if (formId == null) {
// get the start page header
String startPageId = application.getPages().getSortedPages().get(0).getId();
// get the requested Page
Page requestPage = rapidRequest.getPage();
// get the request page id
String requestPageId = null;
// if there was a page get the id
if (requestPage != null) requestPageId = requestPage.getId();
// assume no new id
boolean newId = false;
// if this is the start page
if (startPageId.equals(requestPageId)) {
// we're ok to hand out a new id
newId = true;
} else {
// get the security adapter
SecurityAdapter security = application.getSecurityAdapter();
// if the user has design
try {
if (security.checkUserRole(rapidRequest, Rapid.DESIGN_ROLE)) newId = true;
} catch (SecurityAdapaterException e) {}
}
// there are some rules for creating new form ids - there must be no action and the page must be the start page
if (rapidRequest.getRequest().getParameter("action") == null && newId) {
// get the servlet context
ServletContext servletContext = rapidRequest.getRapidServlet().getServletContext();
// the maste form id as a string
String nextFormIdString = (String) servletContext.getAttribute("nextFormId");
// if null set to "0"
if (nextFormIdString == null) nextFormIdString = "0";
// add 1 to the master form id
formId = Integer.toString(Integer.parseInt( nextFormIdString ) + 1);
// retain master
servletContext.setAttribute("nextFormId", formId);
// put into form ids
formIds.put(application.getId() + "-" + application.getVersion(), formId);
// retain for user
session.setAttribute("userFormIds",formIds);
}
}
return formId;
}
@Override
public FormPageControlValues getFormPageControlValues(RapidRequest rapidRequest, String pageId) {
// retrieve
return getUserFormPageControlValues(rapidRequest).get(pageId);
}
@Override
public void setFormPageControlValues(RapidRequest rapidRequest, String pageId, FormPageControlValues pageControlValues) {
// if there are controls to store
if (pageControlValues.size() > 0) {;
// store them
getUserFormPageControlValues(rapidRequest).put(pageId, pageControlValues);
}
}
@Override
public String getFormPageControlValue(RapidRequest rapidRequest, String pageId, String controlId) {
// get all user form page values
Map<String,FormPageControlValues> userFormPageControlValues = getUserFormPageControlValues(rapidRequest);
// if there are control values stored
if (userFormPageControlValues.size() > 0) {
// look for values from our page
FormPageControlValues pageControlValues = userFormPageControlValues.get(pageId);
// if we have some
if (pageControlValues != null) {
// loop them
for (FormControlValue controlValue : pageControlValues) {
// look for an id match
if (controlValue.getId().equals(controlId)) return controlValue.getValue();
}
}
}
return null;
}
@Override
public String getFormControlValue(RapidRequest rapidRequest, String controlId) {
// split the controlid
String[] controlIdParts = controlId.split("_");
// use the parts to access the page and control id
return getFormPageControlValue(rapidRequest, controlIdParts[0], controlId);
}
@Override
public String getSummaryStartHtml(RapidRequest rapidRequest, Application application) {
return "<h1 class='formSummaryTitle'>Form summary</h1>\n";
}
@Override
public String getSummaryEndHtml(RapidRequest rapidRequest, Application application) {
return "";
}
@Override
public String getSummaryPageStartHtml(RapidRequest rapidRequest, Application application, Page page) {
String label = page.getLabel();
if (label == null) {
label = page.getTitle();
} else {
if (label.trim().length() == 0) label = page.getTitle();
}
return "<div class='formSummaryPage'><h2>" + label + "</h2>\n";
}
@Override
public String getSummaryPageEndHtml(RapidRequest rapidRequest, Application application, Page page) {
return "</div>\n";
}
@Override
public String getSummaryControlValueHtml(RapidRequest rapidRequest, Application application, Page page, FormControlValue controlValue) {
Control control = page.getControl(controlValue.getId());
if (control == null) {
return "control " + controlValue.getId() + " cannot be found";
} else {
String label = control.getLabel();
if (label == null) {
return "";
} else {
String value = controlValue.getValue();
// check for nulls
if (value == null) value = "(no value)";
// check for json
if (value.startsWith("{") && value.endsWith("}")) {
try {
JSONObject jsonValue = new JSONObject(value);
value = jsonValue.optString("text");
} catch (Exception ex) {}
}
return "<span class='formSummaryControl'>" + label + " : " + control.getCodeText(value) + "</span>\n";
}
}
}
@Override
public String getSummaryPagesEndHtml(RapidRequest rapidRequest, Application application) {
return "";
}
@Override
public void submitForm(RapidRequest rapidRequest) throws Exception {
// get the user session
HttpSession session = rapidRequest.getRequest().getSession();
// retrieve the form ids from the session
Map<String,String> formIds = (Map<String, String>) session.getAttribute("userFormIds");
// get the application
Application application = rapidRequest.getApplication();
// null check
if (formIds != null) {
// empty the form id - invalidating the form
formIds.put(application.getId() + "-" + application.getVersion(), null);
}
// for a "real" form you would either write to your database form header record that it has been submitted
}
}
| src/com/rapid/forms/RapidFormAdapter.java | /*
Copyright (C) 2015 - Gareth Edwards / Rapid Information Systems
[email protected]
This file is part of the Rapid Application Platform
RapidSOA is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version. The terms require you
to include the original copyright, and the license notice in all redistributions.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
in a file named "COPYING". If not, see <http://www.gnu.org/licenses/>.
*/
package com.rapid.forms;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpSession;
import org.json.JSONObject;
import com.rapid.core.Application;
import com.rapid.core.Control;
import com.rapid.core.Page;
import com.rapid.security.SecurityAdapter;
import com.rapid.security.SecurityAdapter.SecurityAdapaterException;
import com.rapid.server.Rapid;
import com.rapid.server.RapidRequest;
public class RapidFormAdapter extends FormAdapter {
// constructor
public RapidFormAdapter(ServletContext servletContext, Application application) {
super(servletContext, application);
}
// class methods
protected Map<String,FormPageControlValues> getUserFormPageControlValues(RapidRequest rapidRequest) {
// get the user session
HttpSession session = rapidRequest.getRequest().getSession();
// get all app page control values from session
Map<String,Map<String,FormPageControlValues>> userAppPageControlValues = (Map<String, Map<String, FormPageControlValues>>) session.getAttribute("userFormPageControlValues");
// if null
if (userAppPageControlValues == null) {
// instantiate
userAppPageControlValues = new HashMap<String, Map<String, FormPageControlValues>>();
// add to session
session.setAttribute("userFormPageControlValues", userAppPageControlValues);
}
// get the form id
String formId = getFormId(rapidRequest);
// the page controls for specified app
Map<String,FormPageControlValues> userPageControlValues = userAppPageControlValues.get(formId);
// if null, instantiate
if (userPageControlValues == null) {
// instantiate
userPageControlValues = new HashMap<String, FormPageControlValues>();
// add to user app pages
userAppPageControlValues.put(formId, userPageControlValues);
}
// return!
return userPageControlValues;
}
// overridden methods
@Override
public String getFormId(RapidRequest rapidRequest) {
// get the user session (making a new one if need be)
HttpSession session = rapidRequest.getRequest().getSession();
// retrieve the form ids from the session
Map<String,String> formIds = (Map<String, String>) session.getAttribute("userFormIds");
// instantiate if null
if (formIds == null) formIds = new HashMap<String, String>();
// get the application
Application application = rapidRequest.getApplication();
// get the form id based on the app id and version
String formId = formIds.get(application.getId() + "-" + application.getVersion());
// if it's null
if (formId == null) {
// get the start page header
String startPageId = application.getPages().getSortedPages().get(0).getId();
// get the request page id
String requestPageId = rapidRequest.getPage().getId();
// assume no new id
boolean newId = false;
// if this is the start page
if (startPageId.equals(requestPageId)) {
// we're ok to hand out a new id
newId = true;
} else {
// get the security adapter
SecurityAdapter security = application.getSecurityAdapter();
// if the user has design
try {
if (security.checkUserRole(rapidRequest, Rapid.DESIGN_ROLE)) newId = true;
} catch (SecurityAdapaterException e) {}
}
// there are some rules for creating new form ids - there must be no action and the page must be the start page
if (rapidRequest.getRequest().getParameter("action") == null && newId) {
// get the servlet context
ServletContext servletContext = rapidRequest.getRapidServlet().getServletContext();
// the maste form id as a string
String nextFormIdString = (String) servletContext.getAttribute("nextFormId");
// if null set to "0"
if (nextFormIdString == null) nextFormIdString = "0";
// add 1 to the master form id
formId = Integer.toString(Integer.parseInt( nextFormIdString ) + 1);
// retain master
servletContext.setAttribute("nextFormId", formId);
// put into form ids
formIds.put(application.getId() + "-" + application.getVersion(), formId);
// retain for user
session.setAttribute("userFormIds",formIds);
}
}
return formId;
}
@Override
public FormPageControlValues getFormPageControlValues(RapidRequest rapidRequest, String pageId) {
// retrieve
return getUserFormPageControlValues(rapidRequest).get(pageId);
}
@Override
public void setFormPageControlValues(RapidRequest rapidRequest, String pageId, FormPageControlValues pageControlValues) {
// if there are controls to store
if (pageControlValues.size() > 0) {;
// store them
getUserFormPageControlValues(rapidRequest).put(pageId, pageControlValues);
}
}
@Override
public String getFormPageControlValue(RapidRequest rapidRequest, String pageId, String controlId) {
// get all user form page values
Map<String,FormPageControlValues> userFormPageControlValues = getUserFormPageControlValues(rapidRequest);
// if there are control values stored
if (userFormPageControlValues.size() > 0) {
// look for values from our page
FormPageControlValues pageControlValues = userFormPageControlValues.get(pageId);
// if we have some
if (pageControlValues != null) {
// loop them
for (FormControlValue controlValue : pageControlValues) {
// look for an id match
if (controlValue.getId().equals(controlId)) return controlValue.getValue();
}
}
}
return null;
}
@Override
public String getFormControlValue(RapidRequest rapidRequest, String controlId) {
// split the controlid
String[] controlIdParts = controlId.split("_");
// use the parts to access the page and control id
return getFormPageControlValue(rapidRequest, controlIdParts[0], controlId);
}
@Override
public String getSummaryStartHtml(RapidRequest rapidRequest, Application application) {
return "<h1 class='formSummaryTitle'>Form summary</h1>\n";
}
@Override
public String getSummaryEndHtml(RapidRequest rapidRequest, Application application) {
return "";
}
@Override
public String getSummaryPageStartHtml(RapidRequest rapidRequest, Application application, Page page) {
String label = page.getLabel();
if (label == null) {
label = page.getTitle();
} else {
if (label.trim().length() == 0) label = page.getTitle();
}
return "<div class='formSummaryPage'><h2>" + label + "</h2>\n";
}
@Override
public String getSummaryPageEndHtml(RapidRequest rapidRequest, Application application, Page page) {
return "</div>\n";
}
@Override
public String getSummaryControlValueHtml(RapidRequest rapidRequest, Application application, Page page, FormControlValue controlValue) {
Control control = page.getControl(controlValue.getId());
if (control == null) {
return "control " + controlValue.getId() + " cannot be found";
} else {
String label = control.getLabel();
if (label == null) {
return "";
} else {
String value = controlValue.getValue();
// check for nulls
if (value == null) value = "(no value)";
// check for json
if (value.startsWith("{") && value.endsWith("}")) {
try {
JSONObject jsonValue = new JSONObject(value);
value = jsonValue.optString("text");
} catch (Exception ex) {}
}
return "<span class='formSummaryControl'>" + label + " : " + control.getCodeText(value) + "</span>\n";
}
}
}
@Override
public String getSummaryPagesEndHtml(RapidRequest rapidRequest, Application application) {
return "";
}
@Override
public void submitForm(RapidRequest rapidRequest) throws Exception {
// get the user session
HttpSession session = rapidRequest.getRequest().getSession();
// retrieve the form ids from the session
Map<String,String> formIds = (Map<String, String>) session.getAttribute("userFormIds");
// get the application
Application application = rapidRequest.getApplication();
// null check
if (formIds != null) {
// empty the form id - invalidating the form
formIds.put(application.getId() + "-" + application.getVersion(), null);
}
// for a "real" form you would either write to your database form header record that it has been submitted
}
}
| Protection for non-existing page in form id request. | src/com/rapid/forms/RapidFormAdapter.java | Protection for non-existing page in form id request. |
|
Java | agpl-3.0 | cf8a60eae24ce9e923e48b4568ae5761c9b0d22c | 0 | schedulix/schedulix,schedulix/schedulix,schedulix/schedulix,schedulix/schedulix | /*
Copyright (c) 2000-2013 "independIT Integrative Technologies GmbH",
Authors: Ronald Jeninga, Dieter Stubler
schedulix Enterprise Job Scheduling System
independIT Integrative Technologies GmbH [http://www.independit.de]
mailto:[email protected]
This file is part of schedulix
schedulix is free software:
you can redistribute it and/or modify it under the terms of the
GNU Affero General Public License as published by the
Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.independit.scheduler.server;
import java.io.*;
import java.util.*;
import java.lang.*;
import java.net.*;
import java.sql.*;
import java.math.*;
import de.independit.scheduler.server.exception.*;
import de.independit.scheduler.server.locking.*;
import de.independit.scheduler.server.output.*;
import de.independit.scheduler.server.parser.*;
import de.independit.scheduler.server.repository.*;
import de.independit.scheduler.server.util.*;
public class SchedulingThread extends InternalSession
{
public final static String name = "SchedulingThread";
private boolean needSched;
private boolean needReSched;
private long priorityDelay;
private prioComparator pc;
private long timeoutWakeup;
private long lastSchedule;
private Locklist publl = null;
private Vector<Long> resourceRequestList = null;
private final Object resourceRequestLock = new Object();
private final Integer lock = new Integer(0);
private Vector<Long> actualRequestList;
public static final int CREATE = 1;
public static final int ALTER = 2;
public static final int DELETE = 3;
public static final int REGISTER = 4;
public static final int DEREGISTER = 5;
public static final int SUSPEND = 6;
public static final int RESUME = 7;
public static final int SHUTDOWN = 8;
public static final int FINISH = 9;
public static final int STATECHANGE = 10;
public static final int COPY = 11;
public static final int IGNORE_RESOURCE = 12;
public static final int MOVE = 13;
public static final int PRIORITY = 14;
public static final int ALTER_REQAMOUNT = 15;
public static final int OFFLINE_ONLINE = 16;
public static final int RERUN = 17;
public static final int SUBMIT = 18;
public static final int MAX_PRIORITY = 0;
public static final int DEFAULT_PRIORITY = 50;
public static final int MIN_PRIORITY = 100;
public final static int FP_SCOPE = 0;
public final static int FP_FOLDER = 1;
public final static int FP_LOCAL = 2;
private final static Integer ONE = new Integer(1);
public long envhit = 0;
public long envmiss = 0;
private long timer = 0;
private java.util.Date dts = new java.util.Date();
public SchedulingThread(SystemEnvironment env, SyncFifo f)
throws SDMSException
{
super(name);
NR = 1234321;
initThread(env, f, NR, name, SystemEnvironment.scheduleWakeupInterval*1000);
priorityDelay = SystemEnvironment.priorityDelay;
priorityDelay *= 60000;
needSched = true;
needReSched = true;
if(pc == null)
pc = new prioComparator(env, priorityDelay);
timeoutWakeup = Long.MAX_VALUE;
lastSchedule = 0;
}
protected Node getNode(int m)
{
if(m == INITIALIZE) return new DoSchedule(DoSchedule.INITIALIZE);
return new DoSchedule();
}
public int getDynPriority(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
throws SDMSException
{
prioComparator myPc = new prioComparator(sysEnv, priorityDelay);
myPc.setNow();
return myPc.dynPrio(sme);
}
public boolean isBlocked(SystemEnvironment sysEnv, Long smeId, Long rId)
throws SDMSException
{
Reservator r;
synchronized(lock) {
if(publl == null) return false;
r = publl.get(rId, smeId);
}
if(r == null) return false;
return (r.seq > 1 ? true : false);
}
public void addToRequestList(SystemEnvironment sysEnv, Long smeId)
{
synchronized (resourceRequestLock) {
if (sysEnv.tx.resourceRequestList == null)
sysEnv.tx.resourceRequestList = new Vector<Long>();
sysEnv.tx.resourceRequestList.add(smeId);
}
}
public void publishRequestList(SystemEnvironment sysEnv)
{
if (sysEnv.tx.resourceRequestList == null) return;
addToRequestList(sysEnv.tx.resourceRequestList);
}
private void addToRequestList(Vector v)
{
synchronized (resourceRequestLock) {
if (resourceRequestList == null)
resourceRequestList = new Vector<Long>();
resourceRequestList.addAll(v);
}
}
private Vector getRequestList()
{
Vector retval;
synchronized (resourceRequestLock) {
retval = resourceRequestList;
resourceRequestList = null;
}
return (retval == null ? new Vector<Long>() : retval);
}
private void processRequestList(SystemEnvironment sysEnv)
throws SDMSException
{
Vector<Long> v = getRequestList();
actualRequestList = v;
try {
for (int i = 0; i < v.size(); ++i) {
SDMSSubmittedEntity sme;
Long smeId = v.get(i);
Integer oldState;
int os;
try {
sme = SDMSSubmittedEntityTable.getObjectForUpdate(sysEnv, smeId);
oldState = sme.getOldState(sysEnv);
if (oldState == null) {
continue;
}
os = oldState.intValue();
} catch (NotFoundException nfe) {
continue;
}
int state = sme.getState(sysEnv).intValue();
if (state == SDMSSubmittedEntity.DEPENDENCY_WAIT) {
requestSyncSme(sysEnv, sme, oldState.intValue());
if (sme.getState(sysEnv).intValue() != SDMSSubmittedEntity.ERROR) {
if (os == SDMSSubmittedEntity.SUBMITTED ||
os == SDMSSubmittedEntity.DEPENDENCY_WAIT ||
os == SDMSSubmittedEntity.ERROR ||
os == SDMSSubmittedEntity.UNREACHABLE)
sme.checkDependencies(sysEnv);
else
sme.setState(sysEnv, SDMSSubmittedEntity.SYNCHRONIZE_WAIT);
}
} else {
if (state == SDMSSubmittedEntity.SYNCHRONIZE_WAIT) {
reevaluateJSAssignment(sysEnv, sme);
requestSysSme(sysEnv, sme);
} else {
}
}
needSched = true;
}
} catch (SDMSException e) {
throw e;
}
}
protected void scheduleProtected(SystemEnvironment sysEnv)
throws SDMSException
{
try {
schedule(sysEnv);
} catch (Throwable e) {
if (e instanceof SerializationException) {
if (actualRequestList != null) {
addToRequestList(actualRequestList);
}
throw e;
} else {
StringWriter stackTrace = new StringWriter();
e.printStackTrace(new PrintWriter(stackTrace));
doTrace(sysEnv.cEnv, "Schedule threw an exception; server will abort " + e.toString() + ':' + e.getMessage() + "\n" + stackTrace.toString(), SEVERITY_FATAL);
System.exit(1);
}
}
actualRequestList = null;
}
private void schedule(SystemEnvironment sysEnv)
throws SDMSException
{
dts = new java.util.Date();
timer = dts.getTime();
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
processRequestList(sysEnv);
if(needReSched) {
doTrace(cEnv, "==============> Start Resource Rescheduling <=================\nStartTime = 0", SEVERITY_MESSAGE);
SDMSnpJobFootprintTable.table.clearTableUnlocked(sysEnv);
reschedule(sysEnv);
doTrace(cEnv, "==============> End Resource Rescheduling <=================\nEndTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
needSched = true;
}
if(!needSched) {
long ts = dts.getTime() - timeoutWakeup;
if((ts < 0) && (timer < lastSchedule + 10000 )) {
return;
}
}
lastSchedule = timer;
Locklist resourceChain = new Locklist();
doTrace(cEnv, "---------------> Start Synchronize Resource Scheduling <-------------------\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
needSched = false;
syncSchedule(sysEnv, resourceChain);
doTrace(cEnv, "---------------> Start System Resource Scheduling <-------------------\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
resourceSchedule(sysEnv, resourceChain);
synchronized(lock) {
publl = resourceChain;
}
doTrace(cEnv, "---------------> Start Cleanup LifeTables <-------------------\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
long purgeLow = sysEnv.roTxList.first(sysEnv);
doTrace(cEnv, "purgeLow = " + purgeLow, SEVERITY_MESSAGE);
doTrace(cEnv, "purgeSetSize = " + sysEnv.nvPurgeSet.size(), SEVERITY_MESSAGE);
sysEnv.nvPurgeSet.purge(sysEnv, purgeLow);
doTrace(cEnv, "---------------> End Resource Scheduling <-------------------\nEndTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
}
public boolean getNextJobSchedule(SystemEnvironment sysEnv)
throws SDMSException
{
if(needReSched)
return false;
if (sysEnv.maxWriter > 1) {
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
if(needReSched)
return false;
}
HashSet myGroups = new HashSet();
myGroups.add(SDMSObject.adminGId);
sysEnv.cEnv.pushGid(sysEnv, myGroups);
sysEnv.cEnv.setUser();
try {
scheduleProtected(sysEnv);
} finally {
sysEnv.cEnv.popGid(sysEnv);
sysEnv.cEnv.setJobServer();
}
return true;
}
public boolean getPoolSchedule(SystemEnvironment sysEnv)
	throws SDMSException
{
	// Runs one scheduling pass for pool processing.
	// A pending full reschedule makes an incremental pass pointless.
	if (needReSched)
		return false;
	if (sysEnv.maxWriter > 1) {
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
		// The flag may have been raised while we waited for the lock.
		if (needReSched)
			return false;
	}
	scheduleProtected(sysEnv);
	return true;
}
private void reschedule(SystemEnvironment sysEnv)
	throws SDMSException
{
	// Full resource rescheduling: rebuilds resource requests and runnable
	// queues for every job that is not yet running. Processes jobs state by
	// state, from "most ready" (RUNNABLE) down to DEPENDENCY_WAIT; the pass
	// order is significant (see rescheduleVector's maxState handling).
	pc.setNow();
	needReSched = false;
	// All jobserver scopes; handed to each pass for server matching.
	Vector sv = SDMSScopeTable.idx_type.getVectorForUpdate(sysEnv, new Integer(SDMSScope.SERVER));
	Vector rjv = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.RUNNABLE), null, Integer.MAX_VALUE);
	doTrace(cEnv, "Number of Runnable Jobs found: " + rjv.size(), SEVERITY_MESSAGE);
	doTrace(cEnv, "==============> Rescheduling Runnables <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
	rescheduleVector(sysEnv, rjv, sv, SDMSSubmittedEntity.RUNNABLE);
	doTrace(cEnv, "==============> Rescheduling Resource Wait <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
	Vector smev = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT), null, Integer.MAX_VALUE);
	doTrace(cEnv, "Number of Jobs in Resource Wait found: " + smev.size(), SEVERITY_MESSAGE);
	rescheduleVector(sysEnv, smev, sv, SDMSSubmittedEntity.RESOURCE_WAIT);
	doTrace(cEnv, "==============> Rescheduling Synchronize Wait <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
	smev = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.SYNCHRONIZE_WAIT), null, Integer.MAX_VALUE);
	doTrace(cEnv, "Number of Jobs in Synchronize Wait found: " + smev.size(), SEVERITY_MESSAGE);
	rescheduleVector(sysEnv, smev, sv, SDMSSubmittedEntity.SYNCHRONIZE_WAIT);
	doTrace(cEnv, "==============> Rescheduling Dependency Wait <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
	// NOTE(review): this pass uses getVector (no update intent) while the
	// previous passes use getVectorForUpdate — confirm this is intentional.
	smev = SDMSSubmittedEntityTable.idx_state.getVector(sysEnv, new Integer(SDMSSubmittedEntity.DEPENDENCY_WAIT), null, Integer.MAX_VALUE);
	doTrace(cEnv, "Number of Jobs in Dependency Wait found: " + smev.size(), SEVERITY_MESSAGE);
	rescheduleVector(sysEnv, smev, sv, SDMSSubmittedEntity.DEPENDENCY_WAIT);
}
private void rescheduleVector(SystemEnvironment sysEnv, Vector smev, Vector sv, int maxState)
	throws SDMSException
{
	// Rebuilds scheduling state for every job in smev (all in the state
	// indicated by maxState): clears runnable-queue entries and disposable
	// resource allocations, then re-requests and re-schedules resources as
	// far as the job's state allows.
	SDMSSubmittedEntity sme;
	SDMSSchedulingEntity se;
	SDMSResourceAllocation ra;
	Long smeId;
	boolean suspended;
	Locklist ll = new Locklist();
	long actVersion;
	// Process jobs in priority order (pc comparator, clock set by caller).
	Collections.sort(smev, pc);
	for(int i = 0; i < smev.size(); i++) {
		sme = (SDMSSubmittedEntity) smev.get(i);
		actVersion = sme.getSeVersion(sysEnv).longValue();
		se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
		// Only entities of type JOB request resources.
		if(se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB) continue;
		smeId = sme.getId(sysEnv);
		// Suspended directly or via a suspended parent.
		if(sme.getIsSuspended(sysEnv).intValue() != SDMSSubmittedEntity.NOSUSPEND || sme.getParentSuspended(sysEnv).intValue() > 0)
			suspended = true;
		else
			suspended = false;
		// Drop all existing runnable-queue entries for this job.
		Vector v = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
		for(int j = 0; j < v.size(); j++) {
			((SDMSRunnableQueue) v.get(j)).delete(sysEnv);
		}
		// Drop resource allocations except firm ALLOCATIONs, IGNOREs and
		// sticky ones; those survive the reschedule.
		v = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
		for(int j = 0; j < v.size(); j++) {
			ra = (SDMSResourceAllocation) v.get(j);
			int allocType = ra.getAllocationType(sysEnv).intValue();
			if(allocType != SDMSResourceAllocation.ALLOCATION &&
			   allocType != SDMSResourceAllocation.IGNORE &&
			   !ra.getIsSticky(sysEnv).booleanValue()) {
				ra.delete(sysEnv, false, true);
			}
		}
		// Re-request synchronizing resources; may set the job to ERROR.
		requestSyncSme(sysEnv, sme, SDMSSubmittedEntity.DEPENDENCY_WAIT);
		if(sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.ERROR)
			continue;
		// Re-evaluate which jobservers can still run the job.
		reevaluateJSAssignment(sysEnv, sme);
		if(sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.ERROR)
			continue;
		// System resources are only requested for jobs at or beyond
		// SYNCHRONIZE_WAIT in the state progression.
		if(maxState == SDMSSubmittedEntity.RUNNABLE ||
		   maxState == SDMSSubmittedEntity.RESOURCE_WAIT ||
		   maxState == SDMSSubmittedEntity.SYNCHRONIZE_WAIT) {
			requestSysSme(sysEnv, sme);
		}
		// Synchronize scheduling for jobs that had progressed past
		// SYNCHRONIZE_WAIT, or are there and not suspended.
		if((maxState == SDMSSubmittedEntity.RUNNABLE ||
		    maxState == SDMSSubmittedEntity.RESOURCE_WAIT) ||
		   (maxState == SDMSSubmittedEntity.SYNCHRONIZE_WAIT && !suspended))
			syncScheduleSme(sysEnv, sme, ll);
		// Resource scheduling for formerly runnable jobs, or jobs that
		// just reached RESOURCE_WAIT and are not suspended.
		if(maxState == SDMSSubmittedEntity.RUNNABLE ||
		   (sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.RESOURCE_WAIT && !suspended))
			resourceScheduleSme(sysEnv, sme, ll);
	}
}
public void syncSchedule(SystemEnvironment sysEnv, Locklist resourceChain)
	throws SDMSException
{
	// Synchronize scheduling pass: tries to reserve synchronizing resources
	// for all jobs currently in SYNCHRONIZE_WAIT, in priority order.
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	SDMSSubmittedEntity sme;
	int i;
	Vector smev = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.SYNCHRONIZE_WAIT), null, Integer.MAX_VALUE);
	Vector sv = SDMSScopeTable.idx_type.getVector(sysEnv, new Integer(SDMSScope.SERVER));
	doTrace(cEnv, "Number of Job Server : " + sv.size(), SEVERITY_DEBUG);
	doTrace(cEnv, "Number of Jobs in SYNCHRONIZE_WAIT : " + smev.size(), SEVERITY_DEBUG);
	// Without any jobserver nothing can be scheduled at all.
	if(sv.size() == 0) {
		return;
	}
	// Reset the timeout bookkeeping; checkTimeout() lowers timeoutWakeup to
	// the earliest pending job timeout during this pass.
	timeoutWakeup = Long.MAX_VALUE;
	pc.setNow();
	Collections.sort(smev, pc);
	for(i = 0; i < smev.size(); ++i) {
		sme = (SDMSSubmittedEntity) smev.get(i);
		// Skip jobs that are suspended (directly or via a parent) or
		// still carry a pending old state.
		if(sme.getIsSuspended(sysEnv).intValue() != SDMSSubmittedEntity.NOSUSPEND ||
		   sme.getParentSuspended(sysEnv).intValue() > 0 ||
		   sme.getOldState(sysEnv) != null)
			continue;
		syncScheduleSme(sysEnv, sme, resourceChain);
	}
}
public void requestSyncSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int oldState)
	throws SDMSException
{
	// Requests the SYNCHRONIZING resources of a single job on all
	// jobservers matching the job's environment, then clears the job's
	// old-state marker.
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	final long version = sme.getSeVersion(sysEnv).longValue();
	final SDMSSchedulingEntity schedEntity = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), version);
	// Only entities of type JOB request resources.
	if (schedEntity.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB)
		return;
	final Vector candidateServers = getServerList(sysEnv, sme, schedEntity, version);
	requestResourceSme(sysEnv, sme, schedEntity, candidateServers, SDMSNamedResource.SYNCHRONIZING, version, oldState);
	sme.setOldState(sysEnv, null);
}
public void requestSysSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
	throws SDMSException
{
	// Requests the SYSTEM resources of a single job on the jobservers it
	// is already queued for, then clears the job's old-state marker.
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	final long version = sme.getSeVersion(sysEnv).longValue();
	final SDMSSchedulingEntity schedEntity = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), version);
	// Only entities of type JOB request resources.
	if (schedEntity.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB)
		return;
	final Vector candidateServers = findRelevantJobserver(sysEnv, sme);
	requestResourceSme(sysEnv, sme, schedEntity, candidateServers, SDMSNamedResource.SYSTEM, version, SDMSSubmittedEntity.SYNCHRONIZE_WAIT);
	sme.setOldState(sysEnv, null);
}
private Vector getServerList(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, long actVersion)
	throws SDMSException
{
	// Determines the jobservers able to run the given job: all registered
	// server scopes whose footprint provides every resource of the job's
	// environment in an online state.
	// Results are cached per transaction in sysEnv.tx.envJSMap, keyed by
	// environment id; each cache entry is a Vector of
	// (validFrom, validTo, serverVector) triples, one per version window.
	Vector cacheEntry;
	Vector result = null;
	Long validFrom;
	Long validTo;
	if (sysEnv.tx.envJSMap == null)
		sysEnv.tx.envJSMap = new HashMap();
	HashMap envJSMap = sysEnv.tx.envJSMap;
	Long envId = se.getNeId(sysEnv);
	cacheEntry = (Vector) envJSMap.get(envId);
	if (cacheEntry != null) {
		// Look for a cached server list whose validity window covers the
		// job's scheduling entity version.
		for (int i = 0; i < cacheEntry.size(); ++i) {
			Vector v = (Vector) cacheEntry.get(i);
			validFrom = (Long) v.get(0);
			validTo = (Long) v.get(1);
			if (validFrom.longValue() < actVersion && validTo.longValue() >= actVersion) {
				result = (Vector) v.get(2);
				++envhit;
				break;
			}
		}
	}
	if (cacheEntry == null || result == null) {
		// Cache miss: build the server list from scratch.
		++envmiss;
		Vector envv = SDMSEnvironmentTable.idx_neId.getVector(sysEnv, envId, actVersion);
		SDMSNamedEnvironment ne = SDMSNamedEnvironmentTable.getObject(sysEnv, envId, actVersion);
		validFrom = new Long(ne.getValidFrom(sysEnv));
		validTo = new Long(ne.getValidTo(sysEnv));
		// Start with all server scopes and weed out the unsuitable ones.
		result = SDMSScopeTable.idx_type.getVectorForUpdate(sysEnv, new Integer(SDMSScope.SERVER));
		Iterator i = result.iterator();
		while (i.hasNext()) {
			SDMSScope s = (SDMSScope) i.next();
			// Unregistered servers cannot execute anything.
			if (!s.getIsRegistered(sysEnv).booleanValue()) {
				i.remove();
				continue;
			}
			Long sId = s.getId(sysEnv);
			SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUniqueForUpdate(sysEnv, sId);
			HashMap sfp = npsfp.getFp(sysEnv);
			// The server must provide every environment resource, and
			// each of those resources must be online.
			for (int j = 0; j < envv.size(); ++j) {
				SDMSEnvironment env = (SDMSEnvironment) envv.get(j);
				Long nrId = env.getNrId(sysEnv);
				if(!sfp.containsKey(nrId)) {
					i.remove();
					break;
				}
				SDMSResource r = SDMSResourceTable.getObjectForUpdate(sysEnv, (Long) sfp.get(nrId));
				if(!r.getIsOnline(sysEnv).booleanValue()) {
					i.remove();
					break;
				}
			}
		}
		// Store the freshly computed list in the per-transaction cache.
		Vector v = new Vector();
		v.add(validFrom);
		v.add(validTo);
		v.add(result);
		if (cacheEntry == null) cacheEntry = new Vector();
		cacheEntry.add(v);
		envJSMap.put(envId, cacheEntry);
	}
	return result;
}
public static void allocateAndReleaseResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSScope s)
	throws SDMSException
{
	// Converts the job's resource RESERVATIONs into firm ALLOCATIONs for the
	// chosen scope s, and deletes every allocation record that did not
	// become an ALLOCATION (releasing reservations held on other scopes).
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, sysEnv.sched, ObjectLock.EXCLUSIVE);
	Long rId;
	Long nrId;
	Long srId;
	SDMSResource r, sr;
	SDMSResourceAllocation ra;
	SDMSnpJobFootprint npjfp;
	try {
		npjfp = SDMSnpJobFootprintTable.idx_smeId_getUnique(sysEnv, sme.getId(sysEnv));
	} catch (NotFoundException nfe) {
		// Footprint not materialized yet; compute it and retry the lookup.
		getJobFootprint(sysEnv, sme);
		npjfp = SDMSnpJobFootprintTable.idx_smeId_getUnique(sysEnv, sme.getId(sysEnv));
	}
	HashMap fpLocal = npjfp.getFpLocal(sysEnv);
	HashMap fpFolder = npjfp.getFpFolder(sysEnv);
	SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
	HashMap sfp = npsfp.getFp(sysEnv);
	Vector v = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, sme.getId(sysEnv));
	for(int i = 0; i < v.size(); i++) {
		ra = (SDMSResourceAllocation) v.get(i);
		rId = ra.getRId(sysEnv);
		r = SDMSResourceTable.getObjectForUpdate(sysEnv, rId);
		nrId = r.getNrId(sysEnv);
		if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.RESERVATION) {
			// Folder/local resources are allocated unconditionally; scope
			// resources only if the reservation refers to the resource
			// this scope's footprint maps the named resource to.
			if(fpFolder.containsKey(nrId) || fpLocal.containsKey(nrId)) {
				ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.ALLOCATION));
			} else {
				srId = (Long) sfp.get(nrId);
				sr = SDMSResourceTable.getObjectForUpdate(sysEnv, srId);
				if(sr.getId(sysEnv).equals(rId)) {
					ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.ALLOCATION));
				}
			}
			// NOTE(review): empty branch — sticky reservations get no
			// special treatment here; confirm this is intentional.
			if (ra.getIsSticky(sysEnv).booleanValue()) {
			}
		}
		// Anything that did not end up as an ALLOCATION is released.
		if (ra.getAllocationType(sysEnv).intValue() != SDMSResourceAllocation.ALLOCATION) ra.delete(sysEnv, true, true);
	}
	// Freed reservations may let other jobs proceed; trigger a new pass.
	SystemEnvironment.sched.needSched = true;
}
private void requestResourceSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, Vector sv, int type, long actVersion, int oldState)
	throws SDMSException
{
	// Requests resources of the given usage type for a job in every scope
	// (out of sv) where the job fits, recording those scopes in the
	// runnable queue. Jobs that fit nowhere are set to ERROR.
	SDMSScope s;
	HashMap masterMap = new HashMap();
	Long smeId = sme.getId(sysEnv);
	Integer smeState = sme.getState(sysEnv);
	boolean fitsSomewhere = false;
	HashMap smefp = (HashMap) getJobFootprint(sysEnv, sme).get(FP_SCOPE);
	if(sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.ERROR)
		return;
	// Narrow the candidate server list: first to servers holding resources
	// the job keeps allocated, then to servers holding its sticky resources.
	Vector jsv = new Vector();
	Vector v;
	if(checkKeptResources(sysEnv, smeId, jsv, sv)) v = jsv;
	else v = sv;
	jsv = new Vector();
	if (checkStickyResources(sysEnv, smeId, smefp, jsv, v)) v = jsv;
	try {
		requestLocalResourceSme(sysEnv, sme, type, masterMap, oldState);
		requestFolderResourceSme(sysEnv, sme, type, masterMap, oldState);
	} catch (SDMSEscape e) {
		// A sticky resource to allocate is not visible: flag the job and
		// suspend its whole master run.
		sme.setToError(sysEnv, "Sticky Resource resolution conflict (resource to allocate not visible)");
		Long tMasterId = sme.getMasterId(sysEnv);
		SDMSSubmittedEntity tMsme = SDMSSubmittedEntityTable.getObject(sysEnv, tMasterId);
		tMsme.suspend(sysEnv, false, false);
	}
	for(int j = 0; j < v.size(); ++j) {
		s = (SDMSScope) v.get(j);
		if (!s.getIsRegistered(sysEnv).booleanValue()) continue;
		Long sId = s.getId(sysEnv);
		if(!s.canExecute(sysEnv, sme))
			continue;
		SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
		HashMap sfp = npsfp.getFp(sysEnv);
		// Each scope is handled in its own subtransaction so a failed
		// attempt can be rolled back without affecting the others.
		sysEnv.tx.beginSubTransaction(sysEnv);
		try {
			if(fits(sysEnv, sfp, smefp, sme, false, null)) {
				requestResources(sysEnv, sme, se, actVersion, sfp, type, smefp, masterMap, oldState);
				// Record (or refresh) the runnable-queue entry for
				// this job/scope pair.
				try {
					SDMSRunnableQueue rq = SDMSRunnableQueueTable.idx_smeId_scopeId_getUniqueForUpdate(sysEnv, new SDMSKey(smeId, sId));
					rq.setState(sysEnv, smeState);
				} catch (NotFoundException nfe) {
					SDMSRunnableQueueTable.table.create(sysEnv, smeId, sId, smeState);
				}
				fitsSomewhere = true;
			}
		} catch (SDMSEscape e) {
			sysEnv.tx.rollbackSubTransaction(sysEnv);
			continue;
		} catch (Exception e) {
			doTrace(cEnv, ": Job " + smeId + " run into an Exception during Resource Scheduling : " + e.toString(), SEVERITY_WARNING);
			sysEnv.tx.rollbackSubTransaction(sysEnv);
			throw e;
		}
		sysEnv.tx.commitSubTransaction(sysEnv);
	}
	if (type == SDMSNamedResource.SYNCHRONIZING) {
		// Clean up sticky master allocations whose "seen" flag (index 1 of
		// each (allocation, flag) pair) was never set while requesting.
		Iterator it = masterMap.values().iterator();
		while (it.hasNext()) {
			Vector rabv = (Vector) it.next();
			for (int vi = 0; vi < rabv.size(); ++vi) {
				Vector rab = (Vector) rabv.get(vi);
				if (((Boolean) rab.get(1)).booleanValue())
					continue;
				((SDMSResourceAllocation) rab.get(0)).cleanupStickyGroup(sysEnv);
			}
		}
	}
	if(!fitsSomewhere) {
		sme.setToError(sysEnv, "Job cannot run in any scope because of resource shortage");
		// Re-run the fit check verbosely per candidate scope so the
		// reason for the failure gets logged.
		for (int j = 0; j < v.size(); ++j) {
			s = (SDMSScope) v.get(j);
			Long sId = s.getId(sysEnv);
			if (!s.getIsRegistered(sysEnv).booleanValue()) {
				continue;
			}
			if(!s.canExecute(sysEnv, sme)) {
				continue;
			}
			SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
			HashMap sfp = npsfp.getFp(sysEnv);
			verboseFits(sysEnv, sfp, smefp, sme, false, null);
		}
	}
}
public void reevaluateJSAssignment(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
	throws SDMSException
{
	// Re-checks, for every jobserver the job is queued on, whether the job
	// still fits there. Scopes where it no longer fits lose their
	// runnable-queue entry and the matching resource allocations; if no
	// scope remains, the job is set to ERROR.
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	SDMSScope s;
	Long smeId = sme.getId(sysEnv);
	boolean fitsSomewhere = false;
	HashMap smefp = (HashMap) getJobFootprint(sysEnv, sme).get(FP_SCOPE);
	doTrace(cEnv, ": Job " + sme.getId(sysEnv) + " is re-evaluated", SEVERITY_DEBUG);
	long actVersion = sme.getSeVersion(sysEnv).longValue();
	SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
	// Only entities of type JOB are assigned to jobservers.
	if(se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB) return;
	// Collect the scopes of the job's current runnable-queue entries.
	final Vector rqv = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
	final Vector sv = new Vector();
	for(int j = 0; j < rqv.size(); j++) {
		SDMSRunnableQueue rq = (SDMSRunnableQueue) rqv.get(j);
		s = SDMSScopeTable.getObject(sysEnv, rq.getScopeId(sysEnv));
		sv.add(s);
		doTrace(cEnv, ": added scope id " + s.getId(sysEnv), SEVERITY_DEBUG);
	}
	doTrace(cEnv, ": we found " + sv.size() + " potential servers", SEVERITY_DEBUG);
	for(int j = 0; j < sv.size(); ++j) {
		s = (SDMSScope) sv.get(j);
		Long sId = s.getId(sysEnv);
		doTrace(cEnv, ": testing server " + sId, SEVERITY_DEBUG);
		SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
		HashMap sfp = npsfp.getFp(sysEnv);
		if(s.getIsRegistered(sysEnv).booleanValue() && fits(sysEnv, sfp, smefp, sme, true, s)) {
			doTrace(cEnv, ": seems to fit *****************", SEVERITY_DEBUG);
			fitsSomewhere = true;
		} else {
			// Job no longer fits on this server: remove its
			// runnable-queue entry (if any) ...
			doTrace(cEnv, ": doesn't seem to fit -+-+-+-+-+-+-+-+-", SEVERITY_DEBUG);
			doTrace(cEnv, ": deleting [" + s.getId(sysEnv) + ", " + smeId + "]", SEVERITY_DEBUG);
			try {
				SDMSRunnableQueue rq = SDMSRunnableQueueTable.idx_smeId_scopeId_getUnique(sysEnv, new SDMSKey(smeId, sId));
				rq.delete(sysEnv);
			} catch (NotFoundException nfe) {
			}
			// ... and delete the allocations that point at this
			// scope's concrete resources.
			Iterator i = smefp.keySet().iterator();
			while(i.hasNext()) {
				Long L = (Long) i.next();
				// NOTE(review): assumes the scope footprint maps every
				// footprint key; a missing entry would make sfp.get(L)
				// null here — confirm that cannot happen.
				SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) sfp.get(L));
				Long rId = r.getId(sysEnv);
				final Vector rav = SDMSResourceAllocationTable.idx_smeId_nrId.getVector(sysEnv, new SDMSKey(smeId, r.getNrId(sysEnv)));
				for (int k = 0; k < rav.size(); ++k) {
					SDMSResourceAllocation ra = (SDMSResourceAllocation) rav.get(k);
					if (ra.getRId(sysEnv).equals(rId)) {
						ra.delete(sysEnv, true, false);
						break;
					}
				}
			}
		}
	}
	if(!fitsSomewhere) {
		sme.setToError(sysEnv, "Job cannot run in any scope because of resource shortage");
	}
}
private void requestFolderResourceSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int type, HashMap masterMap, int oldState)
	throws SDMSException
{
	// Issues requests for the job's folder resources, taken from the
	// folder part of its precomputed footprint.
	final Long smeId = sme.getId(sysEnv);
	try {
		final SDMSnpJobFootprint footprint = SDMSnpJobFootprintTable.idx_smeId_getUnique(sysEnv, smeId);
		requestLocalFolderResources(sysEnv, sme, smeId, footprint.getFpFolder(sysEnv), type, masterMap, oldState);
	} catch(NotFoundException nfe) {
		// No footprint record exists for this job; that is a hard error.
		throw new CommonErrorException(new SDMSMessage(sysEnv, "03501142150", "No footprint found for job $1", smeId));
	}
}
private void requestLocalResourceSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int type, HashMap masterMap, int oldState)
	throws SDMSException
{
	// Issues requests for the job's local resources, taken from the local
	// part of its precomputed footprint.
	final Long smeId = sme.getId(sysEnv);
	try {
		final SDMSnpJobFootprint footprint = SDMSnpJobFootprintTable.idx_smeId_getUniqueForUpdate(sysEnv, smeId);
		requestLocalFolderResources(sysEnv, sme, smeId, footprint.getFpLocal(sysEnv), type, masterMap, oldState);
	} catch(NotFoundException nfe) {
		// No footprint record exists for this job; that is a hard error.
		throw new CommonErrorException(new SDMSMessage(sysEnv, "03501142151", "No footprint found for job $1", smeId));
	}
}
private void requestLocalFolderResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Long smeId, HashMap fp, int type, HashMap masterMap, int oldState)
	throws SDMSException
{
	// Walks the (requirement, resource) pairs of the given footprint and
	// creates a request for every entry whose named resource matches the
	// requested usage type.
	for (Iterator entries = fp.values().iterator(); entries.hasNext(); ) {
		final Vector pair = (Vector) entries.next();
		final SDMSResourceRequirement req = (SDMSResourceRequirement) pair.get(0);
		final SDMSResource res = (SDMSResource) pair.get(1);
		final Long nrId = req.getNrId(sysEnv);
		final SDMSNamedResource namedResource = SDMSNamedResourceTable.getObject(sysEnv, nrId);
		if (namedResource.getUsage(sysEnv).intValue() != type)
			continue;
		// Sticky requirements are skipped unless the job comes straight
		// from SUBMITTED.
		if (req.getIsSticky(sysEnv).booleanValue() && oldState != SDMSSubmittedEntity.SUBMITTED)
			continue;
		createRequest(sysEnv, smeId, req, res, nrId, type, masterMap);
	}
}
private void requestResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se,
			      long actVersion, HashMap sfp, int type, HashMap smefp, HashMap masterMap, int oldState)
	throws SDMSException
{
	// Creates scope resource requests for the job: for every resource
	// requirement in the job footprint matching the usage type, resolves
	// the concrete resource via the server footprint and requests it.
	final Long smeId = sme.getId(sysEnv);
	final Iterator entries = smefp.values().iterator();
	while (entries.hasNext()) {
		final SDMSProxy entry = (SDMSProxy) entries.next();
		// The footprint may hold other proxy types; only requirements
		// lead to requests.
		if (!(entry instanceof SDMSResourceRequirement))
			continue;
		final SDMSResourceRequirement rr = (SDMSResourceRequirement) entry;
		// Sticky requirements are skipped unless the job comes straight
		// from SUBMITTED.
		if (rr.getIsSticky(sysEnv).booleanValue()) {
			if (oldState != SDMSSubmittedEntity.SUBMITTED)
				continue;
		}
		final Long nrId = rr.getNrId(sysEnv);
		final SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
		if (nr.getUsage(sysEnv).intValue() != type)
			continue;
		final SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) sfp.get(nrId));
		createRequest(sysEnv, smeId, rr, r, nrId, type, masterMap);
	}
}
private boolean isVisible(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Long folderId)
	throws SDMSException
{
	// Returns true iff folderId lies on the folder path from the job's
	// defining scheduling entity up to the repository root.
	final Long seVersion = sme.getSeVersion(sysEnv);
	final SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), seVersion);
	for (Long fId = se.getFolderId(sysEnv); fId != null; ) {
		if (fId.equals(folderId))
			return true;
		// Climb one level towards the root.
		fId = SDMSFolderTable.getObject(sysEnv, fId, seVersion).getParentId(sysEnv);
	}
	return false;
}
private void createRequest(SystemEnvironment sysEnv, Long smeId, SDMSResourceRequirement rr, SDMSResource r, Long nrId, int type, HashMap masterMap)
	throws SDMSException
{
	// Creates a REQUEST resource allocation for job smeId on the concrete
	// resource r, satisfying requirement rr. Existing allocations are
	// reference-counted instead of duplicated. For sticky requirements a
	// matching MASTER_REQUEST on the sticky parent is maintained as well;
	// masterMap caches (allocation, seenFlag) pairs per
	// (negated stickyParentId, stickyName, nrId) key so the caller can
	// clean up unreferenced master allocations afterwards.
	Integer lock;
	Long rsmpId;
	Long rId;
	float factor = 1;
	Long stickyParentId = null;
	if(rr == null) return;
	rId = r.getId(sysEnv);
	try {
		// Allocation already exists: just bump its reference count.
		SDMSResourceAllocation ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUniqueForUpdate(
				sysEnv, new SDMSKey(smeId, rId, rr.getStickyName(sysEnv)));
		ra.setRefcount(sysEnv, new Integer(ra.getRefcount(sysEnv).intValue() + 1));
		return;
	} catch (NotFoundException nfe) {
	}
	// NOTE(review): this key uses nrId although the index is named
	// idx_smeId_rId_stickyName (the lookup above uses rId) — verify the
	// named-resource id is really intended here.
	if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, rr.getStickyName(sysEnv)))) return;
	if(type == SDMSNamedResource.SYNCHRONIZING) {
		lock = rr.getLockmode(sysEnv);
		if(lock == null) lock = new Integer(Lockmode.N);
		rsmpId = rr.getRsmpId(sysEnv);
		Long stickyParentSeId = rr.getStickyParent(sysEnv);
		if (rr.getIsSticky(sysEnv).booleanValue()) {
			// Resolve the sticky parent: default is the master run.
			SDMSSubmittedEntity psme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
			stickyParentId = psme.getMasterId(sysEnv);
			if (stickyParentSeId != null) {
				// An explicit sticky parent was named: walk up the
				// parent chain to find the matching ancestor.
				Long pId = psme.getParentId(sysEnv);
				while (pId != null && !stickyParentSeId.equals(psme.getSeId(sysEnv))) {
					psme = SDMSSubmittedEntityTable.getObject(sysEnv, pId);
					pId = psme.getParentId(sysEnv);
				}
				if (pId != null || stickyParentSeId.equals(psme.getSeId(sysEnv))) stickyParentId = psme.getId(sysEnv);
			} else {
				Long sId = r.getScopeId(sysEnv);
				if (!SDMSScopeTable.table.exists(sysEnv, sId)) {
					if (SDMSFolderTable.table.exists(sysEnv, sId)) {
						// Folder resource: the sticky parent is the
						// outermost ancestor that can still see the
						// resource's folder.
						Long stickyParentCandidate = smeId;
						SDMSSubmittedEntity sme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
						Long parentId = sme.getParentId(sysEnv);
						while (parentId != null) {
							sme = SDMSSubmittedEntityTable.getObject(sysEnv, parentId);
							if (isVisible(sysEnv, sme, sId))
								stickyParentCandidate = parentId;
							parentId = sme.getParentId(sysEnv);
						}
						stickyParentId = stickyParentCandidate;
					} else {
						stickyParentId = sId;
					}
				}
			}
		}
	} else {
		// Non-synchronizing requests carry no lockmode and no mapping.
		lock = new Integer(Lockmode.N);
		rsmpId = null;
	}
	Integer reqAmount = new Integer((int) Math.ceil(rr.getAmount(sysEnv).intValue() * factor));
	if (rr.getIsSticky(sysEnv).booleanValue()) {
		// Maintain the MASTER_REQUEST/MASTER_RESERVATION bookkeeping on
		// the sticky parent (stored under the negated parent id).
		String stickyName = rr.getStickyName(sysEnv);
		Long nStickyParentId = new Long(- stickyParentId.longValue());
		SDMSKey masterKey = new SDMSKey(nStickyParentId, stickyName, nrId);
		Vector ravok = (Vector) masterMap.get(masterKey);
		if (ravok == null) {
			// First encounter of this sticky group: collect the existing
			// master allocations with a matching sticky name.
			Vector rav = SDMSResourceAllocationTable.idx_smeId_nrId.getVector(sysEnv, new SDMSKey(nStickyParentId, nrId));
			ravok = new Vector();
			for (int i = 0; i < rav.size(); ++i) {
				SDMSResourceAllocation ra = (SDMSResourceAllocation) rav.get(i);
				String raName = ra.getStickyName(sysEnv);
				if ((raName == null && stickyName == null) || (raName != null && raName.equals(stickyName))) {
					Vector entry = new Vector();
					entry.add(ra);
					entry.add(Boolean.FALSE);
					ravok.add(entry);
				}
			}
			masterMap.put(masterKey, ravok);
		}
		if (ravok.size() != 0) {
			// Master allocations exist: the one for this resource must be
			// compatible (amount and lockmode may only be escalated while
			// still a MASTER_REQUEST, not once reserved).
			boolean raOK = false;
			for (int i = 0; i < ravok.size(); ++i) {
				Vector entry = (Vector) ravok.get(i);
				SDMSResourceAllocation ra = (SDMSResourceAllocation) entry.get(0);
				if (ra.getRId(sysEnv).equals(rId)) {
					raOK = true;
					// Mark the master allocation as referenced.
					entry.set(1, Boolean.TRUE);
					int raReqAmount = ra.getOrigAmount(sysEnv).intValue();
					int raLockMode = ra.getLockmode(sysEnv).intValue();
					if (raReqAmount < reqAmount.intValue()) {
						if (ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.MASTER_RESERVATION) {
							SDMSSubmittedEntity sme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
							SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv,
							                sme.getSeId(sysEnv), sme.getSeVersion(sysEnv));
							throw new CommonErrorException(new SDMSMessage(sysEnv, "03405261410",
							                "Invalid amount escalation for already reserved sticky resource $1, job definition $2",
							                rId, se.pathString(sysEnv)));
						}
						ra.setOrigAmount(sysEnv, reqAmount);
						ra.setAmount(sysEnv, reqAmount);
					}
					// Lockmodes are combined bitwise (most restrictive wins).
					raLockMode &= lock.intValue();
					if (raLockMode != ra.getLockmode(sysEnv).intValue()) {
						if (ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.MASTER_RESERVATION) {
							SDMSSubmittedEntity sme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
							SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv,
							                sme.getSeId(sysEnv), sme.getSeVersion(sysEnv));
							throw new CommonErrorException(new SDMSMessage(sysEnv, "03405261415",
							                "Invalid lock escalation for already reserved sticky resource $1, job definition $2",
							                rId, se.pathString(sysEnv)));
						}
						ra.setLockmode(sysEnv, new Integer(raLockMode));
					}
					int refCount = ra.getRefcount(sysEnv).intValue();
					ra.setRefcount(sysEnv, new Integer(refCount + 1));
					break;
				}
			}
			if (!raOK) {
				// No master allocation for this resource: the caller
				// rolls back this scope's subtransaction.
				throw new SDMSEscape();
			}
		} else {
			// No master allocations yet: create the MASTER_REQUEST.
			try {
				SDMSResourceAllocationTable.table.create(sysEnv,
				                rId, nStickyParentId, nrId,
				                reqAmount,
				                reqAmount,
				                rr.getKeepMode(sysEnv),
				                rr.getIsSticky(sysEnv),
				                rr.getStickyName(sysEnv),
				                stickyParentId,
				                new Integer(SDMSResourceAllocation.MASTER_REQUEST),
				                null,
				                lock,
				                ONE);
			} catch (DuplicateKeyException dke) {
			}
		}
	}
	// Finally create the job's own REQUEST allocation.
	SDMSResourceAllocationTable.table.create(sysEnv,
	                rId, smeId, nrId,
	                reqAmount,
	                reqAmount,
	                rr.getKeepMode(sysEnv),
	                rr.getIsSticky(sysEnv),
	                rr.getStickyName(sysEnv),
	                stickyParentId,
	                new Integer(SDMSResourceAllocation.REQUEST),
	                rsmpId,
	                lock,
	                ONE);
}
private Vector findRelevantJobserver (SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
	throws SDMSException
{
	// Collects the scopes of all runnable-queue entries of this job, i.e.
	// the jobservers the job is currently queued for.
	final Vector servers = new Vector();
	final Vector queueEntries = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, sme.getId(sysEnv));
	for (int idx = 0; idx < queueEntries.size(); ++idx) {
		final SDMSRunnableQueue entry = (SDMSRunnableQueue) queueEntries.get(idx);
		servers.add(SDMSScopeTable.getObject(sysEnv, entry.getScopeId(sysEnv)));
	}
	return servers;
}
private void syncScheduleSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Locklist resourceChain)
	throws SDMSException
{
	// Tries to reserve the synchronizing resources of a single job. On
	// success the job advances to RESOURCE_WAIT; otherwise its timeout is
	// checked.
	Vector rqv;
	SDMSScope s;
	Long sId;
	Long smeId = sme.getId(sysEnv);
	boolean resourcesReserved = false;
	Vector smefpv = getJobFootprint(sysEnv, sme);
	HashMap smefp = (HashMap) smefpv.get(FP_SCOPE);
	Vector jsv = new Vector();
	Vector v;
	long actVersion = sme.getSeVersion(sysEnv).longValue();
	SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
	v = findRelevantJobserver(sysEnv, sme);
	// First reserve the folder and local synchronizing resources; both
	// must succeed before any scope resources are attempted.
	final HashMap fpFolder = (HashMap) smefpv.get(FP_FOLDER);
	final HashMap fpLocal = (HashMap) smefpv.get(FP_LOCAL);
	resourcesReserved = reserveFp(sysEnv, sme, se, resourceChain, fpFolder, SDMSNamedResource.SYNCHRONIZING) &&
	                    reserveFp(sysEnv, sme, se, resourceChain, fpLocal, SDMSNamedResource.SYNCHRONIZING);
	if(!resourcesReserved) {
		checkTimeout(sysEnv, sme, se, actVersion);
		return;
	}
	resourcesReserved = false;
	// Try to reserve the scope synchronizing resources on each jobserver
	// the job is queued for.
	for(int j = 0; j < v.size(); ++j) {
		s = (SDMSScope) v.get(j);
		sId = s.getId(sysEnv);
		SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, sId);
		HashMap sfp = npsfp.getFp(sysEnv);
		if(!checkStaticResources(sysEnv, sfp, smefp)) continue;
		Iterator rrvi = smefp.values().iterator();
		if(reserveSyncResources(sysEnv, sme, se, actVersion, sfp, resourceChain, rrvi)) {
			resourcesReserved = true;
			// Mark this scope's queue entry as ready for resource wait.
			SDMSRunnableQueue rq = SDMSRunnableQueueTable.idx_smeId_scopeId_getUniqueForUpdate(sysEnv, new SDMSKey(smeId, sId));
			rq.setState(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT));
		}
	}
	if(resourcesReserved) {
		// Drop leftover synchronizing REQUESTs; reservations remain.
		Vector rv = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
		for(int i = 0; i < rv.size(); i++) {
			SDMSResourceAllocation ra = (SDMSResourceAllocation) rv.get(i);
			if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.REQUEST) {
				SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, ra.getNrId(sysEnv));
				if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYNCHRONIZING)
					continue;
				ra.delete(sysEnv, true, true);
			}
		}
		// Remove queue entries for scopes where reservation failed.
		rqv = SDMSRunnableQueueTable.idx_smeId.getVector(sysEnv, smeId);
		for(int i = 0; i < rqv.size(); i++) {
			SDMSRunnableQueue rq = (SDMSRunnableQueue) rqv.get(i);
			if(rq.getState(sysEnv).intValue() != SDMSSubmittedEntity.RESOURCE_WAIT) {
				rq.delete(sysEnv);
			}
		}
		sme.setState(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT));
	} else {
		checkTimeout(sysEnv, sme, se, actVersion);
	}
}
private boolean checkKeptResources(SystemEnvironment sysEnv, Long smeId, Vector jsv, Vector sv)
	throws SDMSException
{
	// If the job still holds ALLOCATED resources, restricts the candidate
	// server list: jsv is filled with the scopes (out of sv) whose
	// footprint contains every such resource. Returns true iff jsv was
	// populated (i.e. the job has at least one kept allocation).
	SDMSScope s;
	Vector rav = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
	boolean jsv_used = false;
	for(int j = 0; j < rav.size(); j++) {
		SDMSResourceAllocation ra = (SDMSResourceAllocation) rav.get(j);
		int raAllocationType = ra.getAllocationType(sysEnv).intValue();
		if(raAllocationType == SDMSResourceAllocation.ALLOCATION) {
			if(!jsv_used) {
				// First allocation: seed jsv with all scopes providing
				// the allocated resource.
				for(int i = 0; i < sv.size(); i++) {
					s = (SDMSScope) sv.get(i);
					SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
					HashMap sfp = npsfp.getFp(sysEnv);
					if(sfp.containsValue(ra.getRId(sysEnv))) {
						jsv.addElement(s);
						jsv_used = true;
					}
				}
			} else {
				// Further allocations: intersect — drop scopes that do
				// not also provide this resource.
				Iterator jsi = jsv.iterator();
				while(jsi.hasNext()) {
					s = (SDMSScope) jsi.next();
					SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
					HashMap sfp = npsfp.getFp(sysEnv);
					if(!sfp.containsValue(ra.getRId(sysEnv))) {
						jsi.remove();
					}
				}
			}
		}
	}
	return jsv_used;
}
private boolean checkStickyResources(SystemEnvironment sysEnv, Long smeId, HashMap smefp, Vector jsv, Vector v)
	throws SDMSException
{
	// If the job holds sticky allocations for resources in its footprint,
	// fills jsv with the scopes (out of v) whose footprint provides one of
	// those resources. Returns true iff any such sticky allocation exists
	// (regardless of whether jsv ends up non-empty).
	Vector rav = SDMSResourceAllocationTable.idx_smeId.getVector(sysEnv, smeId);
	SDMSResourceAllocation ra;
	// Filter down to sticky allocations that appear in the job footprint.
	Iterator ravi = rav.iterator();
	while (ravi.hasNext()) {
		ra = (SDMSResourceAllocation) ravi.next();
		if (!ra.getIsSticky(sysEnv).booleanValue()) {
			ravi.remove();
			continue;
		}
		if (!smefp.containsKey(ra.getNrId(sysEnv))) {
			ravi.remove();
			continue;
		}
	}
	if (rav.size() == 0) return false;
	// NOTE(review): a scope is appended once per matching allocation, so
	// jsv can contain duplicates — confirm callers tolerate that.
	for (int i = 0; i < v.size(); ++i) {
		SDMSScope s = (SDMSScope) v.get(i);
		SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
		HashMap sfp = npsfp.getFp(sysEnv);
		ravi = rav.iterator();
		while (ravi.hasNext()) {
			ra = (SDMSResourceAllocation) ravi.next();
			if(sfp.containsValue(ra.getRId(sysEnv))) {
				jsv.addElement(s);
			}
		}
	}
	return true;
}
private void checkTimeout(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, long actVersion)
	throws SDMSException
{
	// Checks whether a job that could not be scheduled has exceeded its
	// configured timeout (measured from its sync timestamp). If so, the
	// job is moved to the configured timeout exit state; otherwise the
	// scheduler's wakeup time is lowered to the job's pending timeout.
	Integer toBase = se.getTimeoutBase(sysEnv);
	if(toBase != null) {
		// Convert the timeout amount into milliseconds using the
		// configured base unit.
		long toTime = se.getTimeoutAmount(sysEnv).longValue();
		switch(toBase.intValue()) {
			case SDMSInterval.MINUTE:
				toTime *= SDMSInterval.MINUTE_DUR;
				break;
			case SDMSInterval.HOUR:
				toTime *= SDMSInterval.HOUR_DUR;
				break;
			case SDMSInterval.DAY:
				toTime *= SDMSInterval.DAY_DUR;
				break;
			case SDMSInterval.WEEK:
				toTime *= SDMSInterval.WEEK_DUR;
				break;
			case SDMSInterval.MONTH:
				toTime *= SDMSInterval.MONTH_DUR;
				break;
			case SDMSInterval.YEAR:
				toTime *= SDMSInterval.YEAR_DUR;
				break;
		}
		java.util.Date ldts = new java.util.Date();
		long ts = ldts.getTime();
		long nextTimeout = sme.getSyncTs(sysEnv).longValue() + toTime;
		if(nextTimeout < ts) {
			// Timeout exceeded: force the configured exit state.
			doTrace(cEnv, ": Job " + sme.getId(sysEnv) + " run into timeout", SEVERITY_MESSAGE);
			Long esdId, espId;
			esdId = se.getTimeoutStateId(sysEnv);
			espId = se.getEspId(sysEnv);
			SDMSExitState es = SDMSExitStateTable.idx_espId_esdId_getUnique(sysEnv, new SDMSKey(espId, esdId), actVersion);
			sme.changeState(sysEnv, esdId, es, sme.getExitCode(sysEnv), "Timeout", null );
		} else {
			// Still within the timeout: remember the earliest wakeup.
			if(nextTimeout < timeoutWakeup) timeoutWakeup = nextTimeout;
		}
	}
}
public void resourceSchedule(SystemEnvironment sysEnv, Locklist resourceChain)
	throws SDMSException
{
	// Resource scheduling pass: tries to reserve system resources for
	// every non-suspended job currently in RESOURCE_WAIT, in priority
	// order.
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	final Vector waiting = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT), null, Integer.MAX_VALUE);
	doTrace(cEnv, "Number of Jobs in RESOURCE_WAIT : " + waiting.size(), SEVERITY_DEBUG);
	pc.setNow();
	Collections.sort(waiting, pc);
	for (int idx = 0; idx < waiting.size(); ++idx) {
		final SDMSSubmittedEntity job = (SDMSSubmittedEntity) waiting.get(idx);
		// Jobs suspended directly or via a suspended parent are skipped.
		if (job.getIsSuspended(sysEnv).intValue() != SDMSSubmittedEntity.NOSUSPEND || job.getParentSuspended(sysEnv).intValue() > 0)
			continue;
		resourceScheduleSme(sysEnv, job, resourceChain);
	}
}
// Attempt to move a single RESOURCE_WAIT job towards RUNNABLE:
// 1) reserve its local and folder system resources,
// 2) find a candidate scope whose scope-level system resources can be reserved,
// 3) on success drop obsolete requests/queue entries and notify the jobserver.
// On any failure the job's timeout is (re)checked instead.
private void resourceScheduleSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Locklist resourceChain)
throws SDMSException
{
SDMSRunnableQueue rq;
boolean resourcesReserved = false;
Long smeId = sme.getId(sysEnv);
long actVersion = sme.getSeVersion(sysEnv).longValue();
SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
// Footprint split: scope-level, folder-level and job-local requirements.
final Vector fpv = getJobFootprint(sysEnv, sme);
final HashMap fp = (HashMap) fpv.get(FP_SCOPE);
final HashMap fpFolder = (HashMap) fpv.get(FP_FOLDER);
final HashMap fpLocal = (HashMap) fpv.get(FP_LOCAL);
// Local resources first, folder resources second; both are SYSTEM usage here.
resourcesReserved = reserveFp(sysEnv, sme, se, resourceChain, fpLocal, SDMSNamedResource.SYSTEM);
if(resourcesReserved)
resourcesReserved = reserveFp(sysEnv, sme, se, resourceChain, fpFolder, SDMSNamedResource.SYSTEM);
if(!resourcesReserved) {
// Could not reserve folder/local resources; the job keeps waiting,
// possibly running into its configured timeout.
checkTimeout(sysEnv, sme, se, actVersion);
return;
}
resourcesReserved = false;
// Candidate scopes were precomputed as runnable-queue entries for this job.
Vector v = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
if(v.size() == 0) {
doTrace(cEnv, ": Job " + sme.getId(sysEnv) + " cannot run in any scope", SEVERITY_WARNING);
return;
}
SDMSScope s = null;
// First scope whose system resources can all be reserved wins.
for(int j = 0; j < v.size(); ++j) {
rq = (SDMSRunnableQueue) v.get(j);
s = SDMSScopeTable.getObject(sysEnv, rq.getScopeId(sysEnv));
Iterator it = fp.values().iterator();
HashMap sfp = (SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv))).getFp(sysEnv);
if(reserveSysResources(sysEnv, sme, sfp, resourceChain, it)) {
resourcesReserved = true;
rq.setState(sysEnv, new Integer(SDMSSubmittedEntity.RUNNABLE));
allocateAndReleaseResources(sysEnv, sme, s);
break;
}
}
if(resourcesReserved) {
doTrace(cEnv, ": Job " + smeId + " added to Runnable Queue " + s.getId(sysEnv), SEVERITY_DEBUG);
// Remove leftover SYSTEM resource requests; the winning scope's
// reservations have already been converted.
Vector rv = SDMSResourceAllocationTable.idx_smeId.getVector(sysEnv, smeId);
for(int i = 0; i < rv.size(); i++) {
SDMSResourceAllocation ra = (SDMSResourceAllocation) rv.get(i);
if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.REQUEST) {
SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, ra.getNrId(sysEnv));
if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYSTEM)
continue;
ra.delete(sysEnv, true, true);
}
}
// Drop the queue entries for all scopes that were not selected.
Vector rqv = SDMSRunnableQueueTable.idx_smeId.getVector(sysEnv, smeId);
for(int i = 0; i < rqv.size(); i++) {
rq = (SDMSRunnableQueue) rqv.get(i);
if(rq.getState(sysEnv).intValue() != SDMSSubmittedEntity.RUNNABLE) {
rq.delete(sysEnv);
}
}
sme.setState(sysEnv, new Integer(SDMSSubmittedEntity.RUNNABLE));
// Wake the jobserver that will execute the job.
sysEnv.notifier.addJobServerToNotify(s.getId(sysEnv));
} else {
// No scope could host the job right now; re-check the timeout.
checkTimeout(sysEnv, sme, se, actVersion);
}
}
// Decide whether a job's footprint (smeFp: nrId -> requirement/environment entry)
// can be satisfied by a scope's footprint (scopeFp: nrId -> resource id).
// Checks presence of every needed resource, optionally evaluates resource
// conditions, and compares requested vs. requestable amounts.
// Returns false on the first resource that cannot be satisfied.
public static boolean fits(SystemEnvironment sysEnv, HashMap scopeFp, HashMap smeFp, SDMSSubmittedEntity sme, boolean checkCondition, SDMSScope evalScope)
throws SDMSException
{
if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
LockingSystem.lock(sysEnv, sysEnv.sched, ObjectLock.EXCLUSIVE);
Iterator i = smeFp.keySet().iterator();
while(i.hasNext()) {
Long L = (Long) i.next();
// Scope must offer the named resource at all.
if(!scopeFp.containsKey(L)) {
return false;
}
SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) scopeFp.get(L));
Integer sAmount = r.getRequestableAmount(sysEnv);
SDMSResourceRequirement rr;
SDMSEnvironment e;
// Footprint entries are either explicit requirements or environment entries.
SDMSProxy p = (SDMSProxy) smeFp.get(L);
if (p instanceof SDMSResourceRequirement) {
rr = (SDMSResourceRequirement) p;
e = null;
} else {
e = (SDMSEnvironment) p;
rr = null;
}
if(checkCondition) {
String condition = (rr == null ? e.getCondition(sysEnv) : rr.getCondition(sysEnv));
if (condition != null) {
final BoolExpr be = new BoolExpr(condition);
try {
if (! be.checkCondition(sysEnv, r, sme, sme, null, null, evalScope)) {
return false;
}
} catch (CommonErrorException cee) {
// A broken condition counts as "does not fit".
if (sysEnv.tx.mode == SDMSTransaction.READWRITE) {
SDMSNamedResource nr;
if (rr != null) {
nr = SDMSNamedResourceTable.getObject(sysEnv, rr.getNrId(sysEnv));
} else {
nr = SDMSNamedResourceTable.getObject(sysEnv, e.getNrId(sysEnv));
}
// NOTE(review): msg is built but never used (dead store) — presumably
// meant to be traced/logged; left as-is because pathString() may throw
// and removing it would alter the error path. TODO confirm intent.
String msg = cee.toString() + " evaluating the condition for resource " + nr.pathString(sysEnv);
}
return false;
}
}
}
// Environment entries request no amount; requirements request theirs.
Integer jAmount;
if(rr == null) jAmount = new Integer(0);
else jAmount = rr.getAmount(sysEnv);
// A null requestable amount means "unlimited".
if(sAmount == null)
continue;
if(jAmount.compareTo(sAmount) > 0) {
return false;
}
}
return true;
}
// Near-duplicate of fits() without the scheduler lock; despite the name it
// emits no extra output (see the dead msg variable below).
// NOTE(review): consider folding fits() and verboseFits() together — they
// differ only in locking and minor error handling. TODO confirm.
private static boolean verboseFits(SystemEnvironment sysEnv, HashMap scopeFp, HashMap smeFp, SDMSSubmittedEntity sme, boolean checkCondition, SDMSScope evalScope)
throws SDMSException
{
Iterator i = smeFp.keySet().iterator();
while(i.hasNext()) {
Long L = (Long) i.next();
if(!scopeFp.containsKey(L))
return false;
SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) scopeFp.get(L));
Integer sAmount = r.getRequestableAmount(sysEnv);
SDMSResourceRequirement rr;
SDMSEnvironment e;
SDMSProxy p = (SDMSProxy) smeFp.get(L);
if (p instanceof SDMSResourceRequirement) {
rr = (SDMSResourceRequirement) p;
e = null;
} else {
e = (SDMSEnvironment) p;
rr = null;
}
if(checkCondition) {
String condition = (rr == null ? e.getCondition(sysEnv) : rr.getCondition(sysEnv));
if (condition != null) {
final BoolExpr be = new BoolExpr(condition);
try {
if (! be.checkCondition(sysEnv, r, sme, sme, null, null, evalScope)) return false;
} catch (CommonErrorException cee) {
SDMSNamedResource nr;
if (rr != null) {
nr = SDMSNamedResourceTable.getObject(sysEnv, rr.getNrId(sysEnv));
} else {
nr = SDMSNamedResourceTable.getObject(sysEnv, e.getNrId(sysEnv));
}
// NOTE(review): msg is computed but never used — likely intended for
// tracing; kept because nr.pathString() may throw on the error path.
String msg = cee.toString() + " evaluating the condition for resource " + nr.pathString(sysEnv);
return false;
}
}
}
Integer jAmount;
if(rr == null) jAmount = new Integer(0);
else jAmount = rr.getAmount(sysEnv);
if(sAmount == null)
continue;
if(jAmount.compareTo(sAmount) > 0)
return false;
}
return true;
}
private boolean checkStaticResources(SystemEnvironment sysEnv, HashMap scopeFp, HashMap smeFp)
throws SDMSException
{
    // A scope can host the job only if every STATIC resource the job
    // needs exists in the scope footprint and is currently online.
    final Iterator needed = smeFp.keySet().iterator();
    while (needed.hasNext()) {
        final Long nrId = (Long) needed.next();
        if (!scopeFp.containsKey(nrId))
            return false;
        final SDMSResource res = SDMSResourceTable.getObjectForUpdate(sysEnv, (Long) scopeFp.get(nrId));
        final SDMSNamedResource named = SDMSNamedResourceTable.getObject(sysEnv, res.getNrId(sysEnv));
        // Only STATIC resources matter here; other usages are checked elsewhere.
        if (named.getUsage(sysEnv).intValue() != SDMSNamedResource.STATIC)
            continue;
        if (!res.getIsOnline(sysEnv).booleanValue())
            return false;
    }
    return true;
}
// Reserve all SYNCHRONIZING resources of a job inside a sub-transaction.
// Sticky requirements are coordinated through a master reservation shared by
// the sticky family (keyed by the negated sticky parent id).
// Returns true if all reservations succeeded (sub-transaction committed),
// false otherwise (sub-transaction rolled back, partial reservations undone).
private boolean reserveSyncResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, long actVersion, HashMap sfp, Locklist resourceChain, Iterator i)
throws SDMSException
{
SDMSResourceRequirement rr;
SDMSProxy proxy;
SDMSResource r;
Long smeId = sme.getId(sysEnv);
Long nrId;
Long rId;
Long stickyParent;
Long nStickyParent;
String rrStickyName;
SDMSResourceAllocation ra = null;
SDMSResourceAllocation mra = null;
boolean isSticky;
boolean allocSucceeded = true;
int waitAmount;
Lockmode waitLock;
Reservator rsrv = null;
// All reservations happen atomically: SDMSEscape triggers a rollback.
sysEnv.tx.beginSubTransaction(sysEnv);
try {
// Collects master reservations created on behalf of this job so their
// chain entries can be released again on success.
Vector srv = new Vector();
while(i.hasNext()) {
proxy = (SDMSProxy) i.next();
if (!(proxy instanceof SDMSResourceRequirement)) continue;
rr = (SDMSResourceRequirement) proxy;
nrId = rr.getNrId(sysEnv);
SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
// Only SYNCHRONIZING resources are handled here.
if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYNCHRONIZING) continue;
r = SDMSResourceTable.getObject(sysEnv, (Long) sfp.get(nrId));
rId = r.getId(sysEnv);
try {
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv, new SDMSKey(smeId, rId, rr.getStickyName(sysEnv)));
} catch (NotFoundException nfe) {
// No allocation by resource id; an entry keyed by nrId means "ignored".
if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, rr.getStickyName(sysEnv))))
continue;
doTrace(cEnv, ": Job " + smeId + " needs a resource " + nrId + "/" + rId +
" which is neither requested/reserved/allocated nor ignored", SEVERITY_ERROR);
continue;
}
int allocType = ra.getAllocationType(sysEnv).intValue();
// Already ignored, reserved or allocated: nothing to do.
if(allocType == SDMSResourceAllocation.IGNORE) continue;
if(allocType == SDMSResourceAllocation.RESERVATION) continue;
if(allocType == SDMSResourceAllocation.ALLOCATION) continue;
if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, rr.getStickyName(sysEnv))))
continue;
// Fold in reservations already promised to other jobs on this resource.
if(resourceChain != null) {
rsrv = resourceChain.get(rId);
}
if(rsrv == null) rsrv = new Reservator(rId, smeId);
else rsrv = new Reservator(rId, smeId, rsrv.amount, rsrv.lock.getLockmode());
isSticky = rr.getIsSticky(sysEnv).booleanValue();
if(isSticky) {
// Sticky requests share a master reservation keyed by -stickyParent.
stickyParent = ra.getStickyParent(sysEnv);
nStickyParent = new Long(- stickyParent.longValue());
rrStickyName = rr.getStickyName(sysEnv);
try {
mra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv,
new SDMSKey(nStickyParent, rId, rrStickyName));
} catch (NotFoundException nfe) {
mra = createUpgradeMasterRequest(sysEnv, sme, rr, r, actVersion);
}
MasterReservationInfo mri = checkMasterReservation(sysEnv, sme, rr, stickyParent, r, rsrv);
if(mri.mustAllocate && (resourceChain != null)) {
resourceChain.set(new Reservator(rId, nStickyParent, mri.amount, mri.lockmode));
srv.add(mra);
}
if(!mri.canAllocate) {
// Without a chain we must give up entirely; with one we record
// the failure and keep scanning to build up wait information.
if(resourceChain != null) {
allocSucceeded = false;
continue;
}
throw new SDMSEscape();
}
if(mri.mustAllocate) {
mra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.MASTER_RESERVATION));
}
}
waitAmount = rsrv.amount;
waitLock = rsrv.lock;
int reason = r.checkAllocate(sysEnv, rr, sme, ra, waitAmount, waitLock);
if(resourceChain != null)
resourceChain.set(new Reservator(rId, smeId, rr.getAmount(sysEnv).intValue(), rr.getLockmode(sysEnv).intValue()));
if(reason != SDMSResource.REASON_AVAILABLE) {
if(resourceChain == null) throw new SDMSEscape();
// State/expire/offline problems cannot be fixed by waiting; drop
// this job's chain entries and bail out.
if((reason & (SDMSResource.REASON_STATE|SDMSResource.REASON_EXPIRE|SDMSResource.REASON_OFFLINE)) != 0) {
resourceChain.removeSme(smeId);
throw new SDMSEscape();
}
allocSucceeded = false;
continue;
}
ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.RESERVATION));
if(isSticky) {
// The job's own share is deducted from the master reservation.
int mAmount = mra.getAmount(sysEnv).intValue();
int raAmount = ra.getAmount(sysEnv).intValue();
mra.setAmount(sysEnv, new Integer(mAmount - raAmount));
}
}
if(!allocSucceeded) throw new SDMSEscape();
if(resourceChain != null) {
// Success: our wait entries (and those of our master reservations)
// are no longer needed in the chain.
resourceChain.removeSme(smeId);
for(int j = 0; j < srv.size(); j++) {
mra = (SDMSResourceAllocation) srv.get(j);
resourceChain.remove(mra.getRId(sysEnv), mra.getSmeId(sysEnv));
}
}
} catch(SDMSEscape e) {
sysEnv.tx.rollbackSubTransaction(sysEnv);
return false;
}
sysEnv.tx.commitSubTransaction(sysEnv);
return true;
}
private boolean reserveFp(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, Locklist resourceChain, HashMap lf_fp, int type)
throws SDMSException
{
    // lf_fp maps nrId -> [requirement, resource]; split it into the
    // nrId -> resourceId map and the requirement list expected by the
    // reserve routines, then delegate by resource usage type.
    final HashMap nrToResource = new HashMap();
    final Vector requirements = new Vector();
    for (Iterator it = lf_fp.values().iterator(); it.hasNext(); ) {
        final Vector pair = (Vector) it.next();
        final SDMSResource res = (SDMSResource) pair.get(1);
        nrToResource.put(res.getNrId(sysEnv), res.getId(sysEnv));
        requirements.add(pair.get(0));
    }
    // Nothing to reserve counts as success.
    if (requirements.isEmpty()) return true;
    if (type == SDMSNamedResource.SYSTEM)
        return reserveSysResources(sysEnv, sme, nrToResource, resourceChain, requirements.iterator());
    return reserveSyncResources(sysEnv, sme, se, sme.getSeVersion(sysEnv).longValue(), nrToResource, resourceChain, requirements.iterator());
}
// Create the MASTER_REQUEST allocation for a sticky resource that does not
// yet have one. The amount is the maximum, and the lockmode the AND of all
// sticky requirements of still-active siblings in the same master run.
// Returns the freshly created allocation (keyed by -masterId).
private SDMSResourceAllocation createUpgradeMasterRequest(SystemEnvironment sysEnv, SDMSSubmittedEntity sme,
SDMSResourceRequirement rr, SDMSResource r, long actVersion)
throws SDMSException
{
SDMSSubmittedEntity tsme;
SDMSResourceRequirement trr;
final Long nrId = rr.getNrId(sysEnv);
final Long rId = r.getId(sysEnv);
Long seId = null;
SDMSResourceAllocation ra;
// NOTE(review): factor is never used in this method.
float factor = 1;
Vector v;
int lockmode = Lockmode.N;
int amount = 0;
int refcount = 0;
// Scan all jobs of the master run for matching sticky requirements.
v = SDMSSubmittedEntityTable.idx_masterId.getVectorForUpdate(sysEnv, sme.getMasterId(sysEnv));
for(int i = 0; i < v.size(); i++) {
tsme = (SDMSSubmittedEntity) v.get(i);
int state = tsme.getState(sysEnv).intValue();
// Only jobs that might still need the resource contribute.
if(state != SDMSSubmittedEntity.SUBMITTED &&
state != SDMSSubmittedEntity.DEPENDENCY_WAIT &&
state != SDMSSubmittedEntity.SYNCHRONIZE_WAIT) continue;
if(tsme.getJobIsFinal(sysEnv).booleanValue()) continue;
seId = tsme.getSeId(sysEnv);
try {
trr = SDMSResourceRequirementTable.idx_seId_nrId_getUnique(sysEnv, new SDMSKey(seId, nrId), actVersion);
} catch (NotFoundException nfe) {
continue;
}
if(! trr.getIsSticky(sysEnv).booleanValue())
continue;
try {
// A sibling that explicitly ignores the resource does not count.
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv, new SDMSKey(tsme.getId(sysEnv), rId, null));
if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.IGNORE) continue;
} catch (NotFoundException nfe) { }
refcount++;
// Lockmodes are combined bitwise (N == all bits); amount is the maximum.
lockmode &= trr.getLockmode(sysEnv).intValue();
int tmp = trr.getAmount(sysEnv).intValue();
if(tmp > amount) amount = tmp;
}
// Master requests are stored with the negated master id as smeId.
ra = SDMSResourceAllocationTable.table.create(sysEnv,
rId, new Long(- sme.getMasterId(sysEnv)), nrId,
new Integer(amount),
new Integer(amount),
rr.getKeepMode(sysEnv),
Boolean.TRUE,
null,
sme.getMasterId(sysEnv),
new Integer(SDMSResourceAllocation.MASTER_REQUEST),
null,
new Integer(lockmode),
new Integer(refcount));
return ra;
}
public MasterReservationInfo checkMasterReservation(SystemEnvironment sysEnv, SDMSSubmittedEntity sme,
        SDMSResourceRequirement rr, Long stickyParent, SDMSResource r)
throws SDMSException
{
    // Convenience overload: evaluate against an empty reservation state.
    final Reservator empty = new Reservator(r.getId(sysEnv), sme.getId(sysEnv));
    return checkMasterReservation(sysEnv, sme, rr, stickyParent, r, empty);
}
// Determine whether the master reservation of a sticky requirement can (and
// must) be established on resource r, given reservations already promised to
// others (rsrv). Returns a MasterReservationInfo with canAllocate/mustAllocate
// flags plus the effective amount and lockmode.
public MasterReservationInfo checkMasterReservation(SystemEnvironment sysEnv, SDMSSubmittedEntity sme,
SDMSResourceRequirement rr, Long stickyParent, SDMSResource r, Reservator rsrv)
throws SDMSException
{
if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
SDMSSubmittedEntity tsme;
SDMSResourceRequirement trr;
final Long nrId = rr.getNrId(sysEnv);
final Long rId = r.getId(sysEnv);
final MasterReservationInfo mri = new MasterReservationInfo();
SDMSResourceAllocation ra = null;
float factor = 1;
String rrStickyName = rr.getStickyName(sysEnv);
// Master allocations are stored under the negated sticky parent id.
Long nStickyParent = new Long(- stickyParent.longValue());
mri.stickyName = rrStickyName;
mri.stickyParent = stickyParent;
Vector v;
int lockmode = Lockmode.N;
int amount = 0;
try {
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv, new SDMSKey(nStickyParent, rId, rrStickyName));
mri.amount = ra.getAmount(sysEnv).intValue();
mri.lockmode = ra.getLockmode(sysEnv).intValue();
} catch (NotFoundException nfe) {
// No master request exists: nothing can or must be allocated.
return mri;
}
if (ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.MASTER_RESERVATION) {
// Already reserved by a sibling; nothing more to do.
mri.mustAllocate = false;
mri.canAllocate = true;
return mri;
}
// factor is constant 1 here, so cAmount == mri.amount.
int cAmount = (int) Math.ceil(mri.amount * factor);
if(!r.checkAmount(sysEnv, cAmount, mri.amount, rsrv.amount)) {
mri.mustAllocate = false;
mri.canAllocate = false;
return mri;
}
if(!r.syncCheckLockmode(sysEnv, mri.lockmode, rsrv.lock)) {
mri.mustAllocate = false;
mri.canAllocate = false;
return mri;
}
mri.mustAllocate = true;
mri.canAllocate = true;
mri.amount = cAmount;
// NOTE(review): lockmode is still Lockmode.N at this point, so the
// lockmode read from the allocation above is overwritten with N here.
// Looks intentional only if N is the correct mode for a fresh master
// reservation — TODO confirm.
mri.lockmode = lockmode;
return mri;
}
// Reserve all SYSTEM resources of a job against a scope footprint inside a
// sub-transaction. Mirrors reserveSyncResources() but without sticky handling.
// Returns true on success (committed), false on failure (rolled back).
private boolean reserveSysResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, HashMap sfp, Locklist resourceChain, Iterator it)
throws SDMSException
{
SDMSResourceRequirement rr;
SDMSProxy proxy;
Long smeId = sme.getId(sysEnv);
Long rId;
Long nrId;
Reservator rsrv = null;
boolean allocSucceeded = true;
// All-or-nothing: SDMSEscape rolls the sub-transaction back.
sysEnv.tx.beginSubTransaction(sysEnv);
try {
while(it.hasNext()) {
proxy = (SDMSProxy) it.next();
if (!(proxy instanceof SDMSResourceRequirement)) continue;
rr = (SDMSResourceRequirement) proxy;
nrId = rr.getNrId(sysEnv);
SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
// Only SYSTEM resources are handled here.
if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYSTEM) continue;
SDMSResource r = SDMSResourceTable.getObjectForUpdate(sysEnv, (Long) sfp.get(nrId));
rId = r.getId(sysEnv);
SDMSResourceAllocation ra = null;
try {
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUniqueForUpdate(sysEnv, new SDMSKey(smeId, rId, null));
} catch (NotFoundException nfe) {
// An entry keyed by nrId means the requirement is ignored.
if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, null))) continue;
doTrace(cEnv, ": Job " + smeId + " needs a resource " + nrId + "/" + rId +
" which is neither requested/reserved/allocated nor ignored", SEVERITY_ERROR);
continue;
}
int allocType = ra.getAllocationType(sysEnv).intValue();
// Already handled allocations need no further work.
if(allocType == SDMSResourceAllocation.IGNORE) continue;
if(allocType == SDMSResourceAllocation.RESERVATION) continue;
if(allocType == SDMSResourceAllocation.ALLOCATION) continue;
// Respect reservations already promised to other jobs.
if(resourceChain != null) {
rsrv = resourceChain.get(rId);
}
if(rsrv == null) rsrv = new Reservator(rId, smeId);
int waitAmount = rsrv.amount;
Lockmode waitLock = rsrv.lock;
int reason = r.checkAllocate(sysEnv, rr, sme, ra, waitAmount, waitLock);
if(reason != SDMSResource.REASON_AVAILABLE) {
// Offline resources cannot be waited for; anything else is
// recorded in the chain and the scan continues.
if(resourceChain != null && reason != SDMSResource.REASON_OFFLINE) {
resourceChain.set(new Reservator(rId, smeId, rr.getAmount(sysEnv).intValue(), Lockmode.N));
allocSucceeded = false;
continue;
}
throw new SDMSEscape();
}
ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.RESERVATION));
}
if(!allocSucceeded) throw new SDMSEscape();
if(resourceChain != null) {
// Success: this job's wait entries are obsolete.
resourceChain.removeSme(smeId);
}
} catch(SDMSEscape e) {
sysEnv.tx.rollbackSubTransaction(sysEnv);
return false;
}
sysEnv.tx.commitSubTransaction(sysEnv);
return true;
}
private void merge(HashMap target, HashMap source)
{
    // Copy every mapping from source that target does not already contain;
    // entries already present in target take precedence.
    final Iterator keys = source.keySet().iterator();
    while (keys.hasNext()) {
        final Long key = (Long) keys.next();
        if (target.containsKey(key))
            continue;
        target.put(key, source.get(key));
    }
}
public HashMap getScopeFootprint(SystemEnvironment sysEnv, SDMSScope s)
throws SDMSException
{
    // Footprint of a scope: nrId -> resource id for every resource defined
    // in the scope itself or inherited from an ancestor scope; on conflict
    // the nearest (most specific) scope wins, because merge() never
    // overwrites existing keys.
    if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
        LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);

    final HashMap footprint = new HashMap();
    final Vector ownResources = SDMSResourceTable.idx_scopeId.getVector(sysEnv, s.getId(sysEnv));
    for (int idx = 0; idx < ownResources.size(); idx++) {
        final SDMSResource res = (SDMSResource) ownResources.get(idx);
        footprint.put(res.getNrId(sysEnv), res.getId(sysEnv));
    }

    // Recurse towards the root scope and fold in inherited resources.
    final Long parentId = s.getParentId(sysEnv);
    if (parentId != null) {
        final SDMSScope parent = SDMSScopeTable.getObject(sysEnv, parentId);
        merge(footprint, getScopeFootprint(sysEnv, parent));
    }
    return footprint;
}
// Compute (or fetch from cache) the resource footprint of a submitted job:
// a Vector of three HashMaps [FP_SCOPE, FP_FOLDER, FP_LOCAL].
// The raw footprint is built from the job's named environment, all folder
// environments up the folder chain, and the requirements of the footprint
// and of the job definition itself; later sources overwrite earlier ones.
public static Vector getJobFootprint(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
throws SDMSException
{
if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
LockingSystem.lock(sysEnv, sysEnv.sched, ObjectLock.EXCLUSIVE);
SDMSnpJobFootprint jfp;
SDMSSchedulingEntity se;
SDMSEnvironment e;
SDMSResourceRequirement rr;
HashMap fp;
Long smeId = sme.getId(sysEnv);
// Fast path: a previously computed footprint is cached per job.
try {
jfp = SDMSnpJobFootprintTable.idx_smeId_getUniqueForUpdate(sysEnv, smeId);
Vector result = new Vector();
result.add(jfp.getFpScope(sysEnv)) ;
result.add(jfp.getFpFolder(sysEnv)) ;
result.add(jfp.getFpLocal(sysEnv)) ;
return result;
} catch(NotFoundException nfe) {
}
// Slow path: rebuild the footprint from the job definition.
fp = new HashMap();
Long seId = sme.getSeId(sysEnv);
long version = sme.getSeVersion(sysEnv).longValue();
se = SDMSSchedulingEntityTable.getObject(sysEnv, seId, version);
// 1) entries from the job's named environment
Vector v = SDMSEnvironmentTable.idx_neId.getVector(sysEnv, se.getNeId(sysEnv), version);
for(int i = 0; i < v.size(); i++) {
e = (SDMSEnvironment) v.get(i);
fp.put(e.getNrId(sysEnv), e);
}
// 2) entries from every folder environment up to the root
Long parentId = se.getFolderId(sysEnv);
do {
SDMSFolder f = SDMSFolderTable.getObject(sysEnv, parentId, version);
Long id = f.getEnvId(sysEnv);
if(id != null) {
v = SDMSEnvironmentTable.idx_neId.getVector(sysEnv, id, version);
for(int i = 0; i < v.size(); i++) {
e = (SDMSEnvironment) v.get(i);
fp.put(e.getNrId(sysEnv), e);
}
}
parentId = f.getParentId(sysEnv);
} while(parentId != null);
// 3) requirements of the footprint object, then of the job itself
// (the latter overwrite the former on equal nrId)
v = SDMSResourceRequirementTable.idx_seId.getVector(sysEnv, se.getFpId(sysEnv), version);
for(int i = 0; i < v.size(); i++) {
rr = (SDMSResourceRequirement) v.get(i);
fp.put(rr.getNrId(sysEnv), rr);
}
v = SDMSResourceRequirementTable.idx_seId.getVector(sysEnv, seId, version);
for(int i = 0; i < v.size(); i++) {
rr = (SDMSResourceRequirement) v.get(i);
fp.put(rr.getNrId(sysEnv), rr);
}
// Split into scope/folder/local parts (and cache the result).
return SystemEnvironment.sched.splitSmeFootprint(sysEnv, sme, se, fp, smeId);
}
// Split a raw footprint (nrId -> requirement/environment entry) into three
// maps: fpLocal (satisfied by a resource owned by the job or an ancestor
// job), fpFolder (satisfied by a folder resource) and fpScope (everything
// else, to be satisfied by a scope). Lookups are memoized per transaction
// in sysEnv.tx.rscCache; a cached null marks a known miss.
// Returns [fpScope, fpFolder, fpLocal]; in READWRITE mode the result is
// also persisted in the per-job footprint cache table.
private Vector splitSmeFootprint(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, HashMap fp, Long smeId)
throws SDMSException
{
SDMSResourceRequirement rr;
SDMSNamedResource nr;
SDMSResource r;
SDMSResource bestFit;
Long bestFitSmeId;
Long bestFitFId;
Long nrId;
HashMap fpFolder = new HashMap();
HashMap fpLocal = new HashMap();
HashMap fpScope = new HashMap();
Vector result = new Vector();
SDMSKey k = null;
Vector kv = null;
long actVersion = sme.getSeVersion(sysEnv).longValue();
if (sysEnv.tx.rscCache == null)
sysEnv.tx.rscCache = new HashMap();
HashMap myRscCache = sysEnv.tx.rscCache;
Iterator fpi = fp.keySet().iterator();
while(fpi.hasNext()) {
bestFit = null;
bestFitSmeId = null;
bestFitFId = null;
nrId = (Long) fpi.next();
SDMSProxy proxy = (SDMSProxy) fp.get(nrId);
if (proxy instanceof SDMSResourceRequirement) {
rr = (SDMSResourceRequirement) proxy;
} else {
rr = null;
}
// Environment entries always resolve at scope level.
if(rr == null) {
proxy.fix();
fpScope.put(nrId, proxy);
continue;
}
// STATIC resources are also scope matters by definition.
nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
if(nr.getUsage(sysEnv).intValue() == SDMSNamedResource.STATIC) {
rr.fix();
fpScope.put(nrId, rr);
continue;
}
// Search the folder chain of the job and of each ancestor job for a
// matching folder resource; kv collects the keys probed on a miss so
// the cache can be back-filled for the whole chain at once.
SDMSSchedulingEntity myse = se;
SDMSSubmittedEntity mysme = sme;
boolean hit;
while(true) {
hit = false;
Long fId = myse.getFolderId(sysEnv);
long myActVersion = mysme.getSeVersion(sysEnv).longValue();
kv = new Vector();
while(fId != null) {
k = new SDMSKey(nrId, fId);
if (myRscCache.containsKey(k)) {
doTrace(cEnv, "Cache hit for folder " + k, SEVERITY_DEBUG);
hit = true;
Vector e = (Vector) myRscCache.get(k);
if (e == null) {
bestFitFId = null;
} else {
bestFit = (SDMSResource) e.get(0);
bestFitFId = (Long) e.get(1);
}
break;
} else {
doTrace(cEnv, "Cache miss for folder " + k, SEVERITY_DEBUG);
try {
r = SDMSResourceTable.idx_nrId_scopeId_getUnique(sysEnv, k);
bestFit = r;
bestFitFId = fId;
Vector e = new Vector();
r.fix();
e.add(r);
e.add(fId);
myRscCache.put(k, e);
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), e);
}
break;
} catch (NotFoundException nfe) {
// Not found here: continue with the parent folder.
fId = SDMSFolderTable.getObject(sysEnv, fId, myActVersion).getParentId(sysEnv);
kv.add(k);
}
}
}
if(bestFitFId != null) break;
doTrace(cEnv, "No folder Resource found for " + k, SEVERITY_DEBUG);
// Remember the misses (unless the result came from the cache anyway).
if (!hit) {
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), null);
}
}
// Try the parent job's folder chain next.
Long pSmeId = mysme.getParentId(sysEnv);
if(pSmeId == null) break;
mysme = SDMSSubmittedEntityTable.getObject(sysEnv, pSmeId);
myse = SDMSSchedulingEntityTable.getObject(sysEnv, mysme.getSeId(sysEnv), myActVersion);
}
// Independently search the job chain itself for a job-owned resource.
Long pSmeId = smeId;
kv = new Vector();
hit = false;
while(pSmeId != null) {
k = new SDMSKey(nrId, pSmeId);
if (myRscCache.containsKey(k)) {
doTrace(cEnv, "Cache hit for sme " + k, SEVERITY_DEBUG);
hit = true;
Vector e = (Vector) myRscCache.get(k);
if (e == null) {
bestFitSmeId = null;
} else {
bestFit = (SDMSResource) e.get(0);
bestFitSmeId = (Long) e.get(1);
}
break;
} else {
try {
doTrace(cEnv, "Cache miss for sme " + k, SEVERITY_DEBUG);
r = SDMSResourceTable.idx_nrId_scopeId_getUnique(sysEnv, k);
bestFit = r;
bestFitSmeId = pSmeId;
Vector e = new Vector();
r.fix();
e.add(r);
e.add(pSmeId);
myRscCache.put(k, e);
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), e);
}
break;
} catch (NotFoundException nfe) {
pSmeId = SDMSSubmittedEntityTable.getObject(sysEnv, pSmeId).getParentId(sysEnv);
kv.add(k);
}
}
}
if(bestFitSmeId == null && !hit) {
doTrace(cEnv, "No sme Resource found for " + k, SEVERITY_DEBUG);
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), null);
}
}
// Classify: sme resource beats folder resource beats scope.
if(bestFitSmeId != null || bestFitFId != null) {
Integer requestableAmount = bestFit.getRequestableAmount(sysEnv);
Integer requestedAmount = rr.getAmount(sysEnv);
if(requestableAmount != null) {
// The chosen resource can never satisfy the request: fail the job early.
if(requestableAmount.compareTo(requestedAmount) < 0 && sysEnv.tx.mode == SDMSTransaction.READWRITE) {
sme.setToError(sysEnv, "Job cannot run because of resource shortage on resource " + nr.pathString(sysEnv));
}
}
Vector v = new Vector();
rr.fix();
v.add(rr);
bestFit.fix();
v.add(bestFit);
if(bestFitSmeId != null) {
fpLocal.put(nrId, v);
} else {
fpFolder.put(nrId, v);
}
} else {
rr.fix();
fpScope.put(nrId, rr);
}
}
if(sysEnv.tx.mode == SDMSTransaction.READWRITE) {
SDMSnpJobFootprintTable.table.create(sysEnv, smeId, fpScope, fpFolder, fpLocal);
}
result.add(fpScope) ;
result.add(fpFolder) ;
result.add(fpLocal) ;
return result;
}
void recalc_sfp(SystemEnvironment sysEnv, Long scopeId, SDMSScope s)
throws SDMSException
{
    // Recompute the cached footprint of a jobserver scope; for intermediate
    // scopes descend recursively until jobserver (SERVER) scopes are reached.
    doTrace(cEnv, "Calculating footprint for scope " + s.pathString(sysEnv), SEVERITY_DEBUG);
    if (s.getType(sysEnv).intValue() != SDMSScope.SERVER) {
        final Vector children = SDMSScopeTable.idx_parentId.getVector(sysEnv, scopeId);
        for (int idx = 0; idx < children.size(); idx++) {
            final SDMSScope child = (SDMSScope) children.get(idx);
            recalc_sfp(sysEnv, child.getId(sysEnv), child);
        }
        return;
    }
    final HashMap footprint = getScopeFootprint(sysEnv, s);
    doTrace(cEnv, "footprint = " + footprint.toString(), SEVERITY_DEBUG);
    (SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, scopeId)).setFp(sysEnv, footprint);
}
// Drop all cached jobserver footprints; buildEnvironment() recreates them.
void destroyEnvironment(SystemEnvironment sysEnv)
throws SDMSException
{
SDMSnpSrvrSRFootprintTable.table.clearTableUnlocked(sysEnv);
}
// (Re)create the cached footprint of every jobserver scope. Unless jsOnly
// is set, additionally collect all waiting JOBs that carry an old state
// (DEPENDENCY_WAIT and SYNCHRONIZE_WAIT) and re-request them for scheduling.
// Always marks the scheduler as needing a pass.
void buildEnvironment(SystemEnvironment sysEnv, boolean jsOnly)
throws SDMSException
{
    SDMSScope s;
    Vector v;
    v = SDMSScopeTable.idx_type.getVector(sysEnv, new Integer(SDMSScope.SERVER));
    for (int j = 0; j < v.size(); j++) {
        s = (SDMSScope) v.get(j);
        SDMSnpSrvrSRFootprintTable.table.create(sysEnv, s.getId(sysEnv), null, getScopeFootprint(sysEnv, s));
    }
    if (!jsOnly) {
        Vector rl = new Vector();
        // Both waiting states are scanned with identical criteria; the
        // duplicated loops of the previous version were folded into a helper.
        collectOldStateJobs(sysEnv, SDMSSubmittedEntity.DEPENDENCY_WAIT, rl);
        collectOldStateJobs(sysEnv, SDMSSubmittedEntity.SYNCHRONIZE_WAIT, rl);
        addToRequestList(rl);
    }
    needSched = true;
}

// Helper for buildEnvironment(): append to rl the ids of all JOB-type
// submitted entities in the given state that still carry an old state.
private void collectOldStateJobs(SystemEnvironment sysEnv, int state, Vector rl)
throws SDMSException
{
    Vector v = SDMSSubmittedEntityTable.idx_state.getVector(sysEnv, new Integer(state));
    for (int i = 0; i < v.size(); ++i) {
        SDMSSubmittedEntity sme = (SDMSSubmittedEntity) v.get(i);
        SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), sme.getSeVersion(sysEnv));
        if (se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB) continue;
        if (sme.getOldState(sysEnv) != null)
            rl.add(sme.getId(sysEnv));
    }
}
public void requestSchedule()
{
    // Mark the scheduler dirty and interrupt its wait so the pass runs promptly.
    needSched = true;
    wakeUp();
}
public void notifyChange(SystemEnvironment sysEnv, SDMSResource r, Long scopeId, int change)
throws SDMSException
{
    // React to a resource change: refresh the affected scope's footprint and
    // decide whether a full reschedule is required. Any valid change triggers
    // at least a normal scheduling pass.
    if (sysEnv.maxWriter > 1)
        LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);

    if (scopeId != null) {
        SDMSScope scope;
        try {
            scope = SDMSScopeTable.getObject(sysEnv, scopeId);
        } catch (NotFoundException nfe) {
            // Scope vanished concurrently; nothing left to refresh.
            scope = null;
        }
        if (scope != null)
            recalc_sfp(sysEnv, scopeId, scope);
    }

    switch (change) {
        case CREATE:
        case ALTER_REQAMOUNT:
        case OFFLINE_ONLINE:
        case DELETE:
            // These changes can invalidate earlier placement decisions.
            needReSched = true;
            break;
        case ALTER:
            break;
        default:
            throw new FatalException(new SDMSMessage(sysEnv, "03202252140", "Unknown change code $1", new Integer(change)));
    }
    needSched = true;
}
public void notifyChange(SystemEnvironment sysEnv, SDMSNamedResource nr, int change)
throws SDMSException
{
    // Named-resource changes never force a full reschedule; any valid change
    // simply triggers a scheduling pass. Unknown codes are a programming error.
    switch (change) {
        case CREATE:
        case ALTER:
        case DELETE:
            break;
        default:
            throw new FatalException(new SDMSMessage(sysEnv, "03203060018", "Unknown change code $1", new Integer(change)));
    }
    needSched = true;
}
// React to scope lifecycle changes: maintain the footprint cache and flag
// the scheduler for a (re)scheduling pass as appropriate.
public void notifyChange(SystemEnvironment sysEnv, SDMSScope s, int change)
throws SDMSException
{
switch(change) {
case CREATE:
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
// New scope gets a footprint cache entry immediately.
SDMSnpSrvrSRFootprintTable.table.create(sysEnv, s.getId(sysEnv), null, getScopeFootprint(sysEnv, s));
break;
case ALTER:
break;
case DELETE:
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
SDMSnpSrvrSRFootprint f = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
f.delete(sysEnv);
needReSched = true;
break;
case REGISTER:
case DEREGISTER:
// Jobserver (de)registration changes the set of usable scopes.
needReSched = true;
break;
case SUSPEND:
break;
case RESUME:
break;
case SHUTDOWN:
// needSched is also set unconditionally below; this is redundant but harmless.
needSched = true;
break;
case MOVE:
case COPY:
// Structural changes invalidate every cached footprint.
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
destroyEnvironment(sysEnv);
buildEnvironment(sysEnv, true);
needSched = true;
break;
default:
throw new FatalException(new SDMSMessage(sysEnv, "03202252142", "Unknown change code $1", new Integer(change)));
}
needSched = true;
}
// React to submitted-entity changes. Terminal state transitions additionally
// drop the job's runnable-queue entries; most changes just request a pass.
public void notifyChange(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int change)
throws SDMSException
{
int size;
Vector v;
switch(change) {
case SUSPEND:
break;
case STATECHANGE:
int s = sme.getState(sysEnv).intValue();
switch(s) {
// Terminal (or broken) states: the job will not run anymore, so any
// pending runnable-queue entries are removed.
case SDMSSubmittedEntity.FINISHED:
case SDMSSubmittedEntity.FINAL:
case SDMSSubmittedEntity.ERROR:
case SDMSSubmittedEntity.CANCELLED:
case SDMSSubmittedEntity.BROKEN_ACTIVE:
case SDMSSubmittedEntity.BROKEN_FINISHED:
v = SDMSRunnableQueueTable.idx_smeId.getVector(sysEnv, sme.getId(sysEnv));
size = v.size();
for (int i = 0; i < size; i ++) {
SDMSRunnableQueue rq = (SDMSRunnableQueue)v.get(i);
rq.delete(sysEnv);
}
break;
}
needSched = true;
break;
case FINISH:
case PRIORITY:
case RERUN:
case IGNORE_RESOURCE:
case SUBMIT:
case RESUME:
needSched = true;
break;
default:
throw new FatalException(new SDMSMessage(sysEnv, "03202252317", "Unknown change code $1", new Integer(change)));
}
}
// Ask for a full reschedule on the next scheduler pass.
public void requestReschedule()
{
needReSched = true;
}
}
// Scheduler work item: either run a normal scheduling pass (SCHEDULE) or
// (re)build the scheduler's cached environment (INITIALIZE).
class DoSchedule extends Node
{
    static final int SCHEDULE = 0;
    static final int INITIALIZE = 1;

    int action;

    public DoSchedule()
    {
        this(SCHEDULE);
    }

    public DoSchedule(int a)
    {
        super();
        action = a;
        // Scheduling passes are internal housekeeping; do not audit them.
        auditFlag = false;
    }

    public void go(SystemEnvironment sysEnv)
        throws SDMSException
    {
        if (action == SCHEDULE) {
            SystemEnvironment.sched.scheduleProtected(sysEnv);
        } else if (action == INITIALIZE) {
            SystemEnvironment.sched.buildEnvironment(sysEnv, false);
        }
    }
}
// Orders submitted entities by dynamic (aged) priority, then raw priority,
// then id — yielding a total, deterministic ordering suitable for sorting
// the RESOURCE_WAIT queue.
class prioComparator implements Comparator
{
    SystemEnvironment sysEnv;
    long priorityDelay;
    long now;

    public prioComparator(SystemEnvironment e, long p)
    {
        sysEnv = e;
        priorityDelay = p;
        now = System.currentTimeMillis();
    }

    // Refresh the reference time used for priority aging.
    public void setNow()
    {
        now = System.currentTimeMillis();
    }

    // Effective priority: the static priority is lowered ("aged") by one for
    // every agingAmount minutes of waiting, but never below the job's lower
    // bound (the larger of the system-wide bound and the job's minimum).
    public int dynPrio(SDMSSubmittedEntity sme)
        throws SDMSException
    {
        int p = sme.getPriority(sysEnv).intValue();
        int lb = Math.max(SystemEnvironment.priorityLowerBound, sme.getMinPriority(sysEnv).intValue());
        if (p <= lb) return p;
        long waited = now - sme.getSubmitTs(sysEnv).longValue();
        // Renamed from "priorityDelay": the old local shadowed the field of the
        // same name, which misleadingly suggested the constructor argument was
        // used here (it is not — the per-job aging amount is).
        long agingDelay = sme.getAgingAmount(sysEnv).intValue();
        if (agingDelay != 0)
            waited /= (agingDelay * 60000);
        else
            waited = 0;
        if (waited >= p) p = lb;
        else p = Math.max(p - (int) waited, lb);
        return p;
    }

    // Compare by dynamic priority, raw priority, then id; SDMSExceptions are
    // surfaced as RuntimeException because Comparator cannot declare them.
    public int compare(Object o1, Object o2)
    {
        SDMSSubmittedEntity sme1 = (SDMSSubmittedEntity) o1;
        SDMSSubmittedEntity sme2 = (SDMSSubmittedEntity) o2;
        try {
            int p1 = dynPrio(sme1);
            int p2 = dynPrio(sme2);
            if (p1 != p2) return (p1 < p2 ? -1 : 1);
            int rp1 = sme1.getRawPriority(sysEnv).intValue();
            int rp2 = sme2.getRawPriority(sysEnv).intValue();
            if (rp1 != rp2) return (rp1 < rp2 ? -1 : 1);
            long l1 = sme1.getId(sysEnv).longValue();
            long l2 = sme2.getId(sysEnv).longValue();
            if (l1 != l2) return (l1 < l2 ? -1 : 1);
        } catch (SDMSException e) {
            throw new RuntimeException("Error while comparing : " + e.toString());
        }
        return 0;
    }
}
// Accumulated reservation state of one resource for one job (or, with the
// aggregate key, the sum over all jobs): requested amount, lock mode, and a
// sequence counter used to track updates.
class Reservator
{
    public Long rId;
    public Long smeId;
    public int amount;
    public Lockmode lock;
    public int seq;

    public Reservator(Long r, Long s)
    {
        this(r, s, 0);
    }

    public Reservator(Long r, Long s, int a)
    {
        this(r, s, a, new Lockmode());
    }

    public Reservator(Long r, Long s, int a, Lockmode l)
    {
        rId = r;
        smeId = s;
        amount = a;
        lock = l;
        seq = 0;
    }

    public Reservator(Long r, Long s, int a, int l)
    {
        this(r, s, a, new Lockmode(l));
    }

    // Combine a further lock request into this reservation:
    // N is neutral, an equal mode keeps the mode, differing modes escalate to X.
    public int addLock(Lockmode lm)
    {
        final int requested = lm.getLockmode();
        final int current = lock.getLockmode();
        if (requested != Lockmode.N) {
            if (current == Lockmode.N)
                lock.setLockmode(requested);
            else if (requested != current)
                lock.setLockmode(Lockmode.X);
        }
        return lock.getLockmode();
    }
}
/**
 * Bookkeeping of resource reservations, indexed both by resource (lpr) and by
 * job (lpj). Each per-resource map additionally keeps a summary Reservator
 * under the key ZERO that aggregates amount, lock mode and request count (seq).
 */
class Locklist
{
    // rId -> (smeId -> Reservator); key ZERO holds the aggregate for the resource
    private HashMap<Long, HashMap<Long, Reservator>> lpr;
    // smeId -> (rId -> Reservator)
    private HashMap<Long, HashMap<Long, Reservator>> lpj;
    static private final Long ZERO = Long.valueOf(0);

    public Locklist()
    {
        lpr = new HashMap<Long, HashMap<Long, Reservator>>();
        lpj = new HashMap<Long, HashMap<Long, Reservator>>();
    }

    /** Returns the reservation of smeId on rId, or a fresh empty one if none is recorded. */
    public Reservator get(Long rId, Long smeId)
    {
        HashMap<Long, Reservator> h = lpr.get(rId);
        if (h == null) return new Reservator(rId, smeId);
        Reservator r = h.get(smeId);
        if (r == null) return new Reservator(rId, smeId);
        return r;
    }

    /** Returns the aggregate (ZERO) reservation for rId. */
    public Reservator get(Long rId)
    {
        return get(rId, ZERO);
    }

    /** Records a reservation and folds it into the resource's ZERO summary. */
    public void set(Reservator r)
    {
        HashMap<Long, Reservator> h = lpr.get(r.rId);
        if (h == null) {
            h = new HashMap<Long, Reservator>();
            lpr.put(r.rId, h);
        }
        h.put(r.smeId, r);
        Reservator rt = h.get(ZERO);
        if (rt == null) {
            rt = new Reservator(r.rId, ZERO);
            h.put(ZERO, rt);
        }
        rt.amount += r.amount;
        rt.addLock(r.lock);
        rt.seq++;
        // the entry remembers how many reservations preceded it (incl. itself)
        r.seq = rt.seq;
        HashMap<Long, Reservator> j = lpj.get(r.smeId);
        if (j == null) {
            j = new HashMap<Long, Reservator>();
            lpj.put(r.smeId, j);
        }
        j.put(r.rId, r);
    }

    // Rebuilds the ZERO summary of a per-resource map from its remaining entries.
    private void rebuildSummary(Long rId, HashMap<Long, Reservator> rh)
    {
        rh.remove(ZERO);
        Reservator zr = new Reservator(rId, ZERO);
        for (Reservator r : rh.values()) {
            zr.amount += r.amount;
            zr.addLock(r.lock);
            zr.seq++;
        }
        rh.put(ZERO, zr);
    }

    /** Drops all reservations held by smeId and refreshes the affected summaries. */
    public void removeSme(Long smeId)
    {
        HashMap<Long, Reservator> h = lpj.get(smeId);
        if (h == null) return;
        for (Long rId : h.keySet()) {
            HashMap<Long, Reservator> rh = lpr.get(rId);
            rh.remove(smeId);
            rebuildSummary(rId, rh);
        }
        lpj.remove(smeId);
    }

    /** Drops the reservation of smeId on rId (if any) and refreshes the summary. */
    public void remove(Long rId, Long smeId)
    {
        HashMap<Long, Reservator> h = lpj.get(smeId);
        if (h == null) return;
        if (h.remove(rId) == null) return;
        HashMap<Long, Reservator> rh = lpr.get(rId);
        rh.remove(smeId);
        rebuildSummary(rId, rh);
    }
}
| src/server/SchedulingThread.java | /*
Copyright (c) 2000-2013 "independIT Integrative Technologies GmbH",
Authors: Ronald Jeninga, Dieter Stubler
schedulix Enterprise Job Scheduling System
independIT Integrative Technologies GmbH [http://www.independit.de]
mailto:[email protected]
This file is part of schedulix
schedulix is free software:
you can redistribute it and/or modify it under the terms of the
GNU Affero General Public License as published by the
Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package de.independit.scheduler.server;
import java.io.*;
import java.util.*;
import java.lang.*;
import java.net.*;
import java.sql.*;
import java.math.*;
import de.independit.scheduler.server.exception.*;
import de.independit.scheduler.server.locking.*;
import de.independit.scheduler.server.output.*;
import de.independit.scheduler.server.parser.*;
import de.independit.scheduler.server.repository.*;
import de.independit.scheduler.server.util.*;
public class SchedulingThread extends InternalSession
{
// Thread name used for registration and tracing
public final static String name = "SchedulingThread";
// true when a scheduling pass is required on the next wakeup
private boolean needSched;
// true when a full resource rescheduling (rebuild of footprints and queues) is required
private boolean needReSched;
// priority aging interval in milliseconds (configured value * 60000, see constructor)
private long priorityDelay;
// comparator used to sort jobs by dynamic priority
private prioComparator pc;
// earliest timestamp at which a timeout forces another scheduling pass
private long timeoutWakeup;
// timestamp of the last completed scheduling pass
private long lastSchedule;
// most recently published resource lock list; read by isBlocked() under 'lock'
private Locklist publl = null;
// job ids whose resource requests still have to be processed; guarded by resourceRequestLock
private Vector<Long> resourceRequestList = null;
private final Object resourceRequestLock = new Object();
// monitor guarding publl. NOTE(review): a boxed Integer as lock object is fragile
// style, but this is a fresh instance, so no unrelated code can share it.
private final Integer lock = new Integer(0);
// snapshot of the request list taken by processRequestList(); re-queued on serialization failure
private Vector<Long> actualRequestList;
// operation codes (presumably used by callers issuing scheduler commands -- TODO confirm)
public static final int CREATE = 1;
public static final int ALTER = 2;
public static final int DELETE = 3;
public static final int REGISTER = 4;
public static final int DEREGISTER = 5;
public static final int SUSPEND = 6;
public static final int RESUME = 7;
public static final int SHUTDOWN = 8;
public static final int FINISH = 9;
public static final int STATECHANGE = 10;
public static final int COPY = 11;
public static final int IGNORE_RESOURCE = 12;
public static final int MOVE = 13;
public static final int PRIORITY = 14;
public static final int ALTER_REQAMOUNT = 15;
public static final int OFFLINE_ONLINE = 16;
public static final int RERUN = 17;
public static final int SUBMIT = 18;
// priority scale: 0 is the best (max) priority, 100 the worst; see prioComparator
public static final int MAX_PRIORITY = 0;
public static final int DEFAULT_PRIORITY = 50;
public static final int MIN_PRIORITY = 100;
// indexes into the vector returned by getJobFootprint()
public final static int FP_SCOPE = 0;
public final static int FP_FOLDER = 1;
public final static int FP_LOCAL = 2;
private final static Integer ONE = new Integer(1);
// statistics: environment cache hits/misses of getServerList()
public long envhit = 0;
public long envmiss = 0;
// start time (ms) of the current schedule() pass, used for trace output
private long timer = 0;
private java.util.Date dts = new java.util.Date();
/**
 * Creates the scheduling thread and registers it with the internal session
 * framework via initThread(). Forces both a scheduling and a full
 * rescheduling pass on the first wakeup.
 */
public SchedulingThread(SystemEnvironment env, SyncFifo f)
throws SDMSException
{
super(name);
// internal session number of this thread
NR = 1234321;
initThread(env, f, NR, name, SystemEnvironment.scheduleWakeupInterval*1000);
// configured delay is multiplied by 60000, i.e. converted from minutes to milliseconds
priorityDelay = SystemEnvironment.priorityDelay;
priorityDelay *= 60000;
needSched = true;
needReSched = true;
// pc is always null here (instance field); guard kept from original code
if(pc == null)
pc = new prioComparator(env, priorityDelay);
timeoutWakeup = Long.MAX_VALUE;
lastSchedule = 0;
}
/**
 * Creates the worker node for the given mode: an initializing DoSchedule for
 * INITIALIZE, a plain DoSchedule otherwise.
 */
protected Node getNode(int m)
{
    final Node node;
    if (m == INITIALIZE) {
        node = new DoSchedule(DoSchedule.INITIALIZE);
    } else {
        node = new DoSchedule();
    }
    return node;
}
/**
 * Computes the current dynamic (aged) priority of the given job using a
 * throw-away comparator initialized with the configured aging delay.
 */
public int getDynPriority(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
    throws SDMSException
{
    final prioComparator comparator = new prioComparator(sysEnv, priorityDelay);
    comparator.setNow();
    return comparator.dynPrio(sme);
}
/**
 * Returns whether the job's request on resource rId is blocked, i.e. whether
 * more than one reservation precedes it in the last published lock list.
 * Returns false when no lock list has been published yet.
 */
public boolean isBlocked(SystemEnvironment sysEnv, Long smeId, Long rId)
    throws SDMSException
{
    Reservator r;
    synchronized(lock) {
        if(publl == null) return false;
        r = publl.get(rId, smeId);
    }
    // defensive only: Locklist.get never returns null
    if(r == null) return false;
    return r.seq > 1;
}
/**
 * Queues a job id on the transaction-local resource request list, creating the
 * list lazily. Guarded by resourceRequestLock.
 */
public void addToRequestList(SystemEnvironment sysEnv, Long smeId)
{
    synchronized (resourceRequestLock) {
        if (sysEnv.tx.resourceRequestList == null) {
            sysEnv.tx.resourceRequestList = new Vector<Long>();
        }
        sysEnv.tx.resourceRequestList.add(smeId);
    }
}
/**
 * Moves the transaction-local request list (if any) into the scheduler-wide
 * request list so the next scheduling pass picks it up.
 */
public void publishRequestList(SystemEnvironment sysEnv)
{
    if (sysEnv.tx.resourceRequestList != null) {
        addToRequestList(sysEnv.tx.resourceRequestList);
    }
}
/**
 * Appends the given job ids to the scheduler-wide request list, creating it
 * lazily. Guarded by resourceRequestLock.
 */
private void addToRequestList(Vector v)
{
    synchronized (resourceRequestLock) {
        Vector<Long> target = resourceRequestList;
        if (target == null) {
            target = new Vector<Long>();
            resourceRequestList = target;
        }
        target.addAll(v);
    }
}
/**
 * Atomically takes ownership of the pending request list and resets the shared
 * field. Never returns null; an empty vector is returned when nothing is queued.
 */
private Vector getRequestList()
{
    Vector taken;
    synchronized (resourceRequestLock) {
        taken = resourceRequestList;
        resourceRequestList = null;
    }
    if (taken == null) {
        taken = new Vector<Long>();
    }
    return taken;
}
/**
 * Processes all queued resource requests: for every job with a recorded old
 * state, either (re)requests synchronizing resources and re-checks its
 * dependencies (DEPENDENCY_WAIT), or re-evaluates the job-server assignment
 * and requests system resources (SYNCHRONIZE_WAIT). Jobs that vanished or
 * have no old state are skipped. Flags a new scheduling pass for each
 * processed job. The processed list is kept in actualRequestList so
 * scheduleProtected() can re-queue it on a serialization failure.
 */
private void processRequestList(SystemEnvironment sysEnv)
    throws SDMSException
{
    Vector<Long> v = getRequestList();
    actualRequestList = v;
    // NOTE: the original wrapped this loop in a catch(SDMSException){throw e;},
    // which was a no-op and has been removed.
    for (int i = 0; i < v.size(); ++i) {
        SDMSSubmittedEntity sme;
        Long smeId = v.get(i);
        Integer oldState;
        int os;
        try {
            sme = SDMSSubmittedEntityTable.getObjectForUpdate(sysEnv, smeId);
            oldState = sme.getOldState(sysEnv);
            if (oldState == null) {
                continue;
            }
            os = oldState.intValue();
        } catch (NotFoundException nfe) {
            // job no longer exists; nothing to do
            continue;
        }
        int state = sme.getState(sysEnv).intValue();
        if (state == SDMSSubmittedEntity.DEPENDENCY_WAIT) {
            requestSyncSme(sysEnv, sme, oldState.intValue());
            if (sme.getState(sysEnv).intValue() != SDMSSubmittedEntity.ERROR) {
                if (os == SDMSSubmittedEntity.SUBMITTED ||
                    os == SDMSSubmittedEntity.DEPENDENCY_WAIT ||
                    os == SDMSSubmittedEntity.ERROR ||
                    os == SDMSSubmittedEntity.UNREACHABLE)
                    sme.checkDependencies(sysEnv);
                else
                    sme.setState(sysEnv, SDMSSubmittedEntity.SYNCHRONIZE_WAIT);
            }
        } else if (state == SDMSSubmittedEntity.SYNCHRONIZE_WAIT) {
            reevaluateJSAssignment(sysEnv, sme);
            requestSysSme(sysEnv, sme);
        }
        needSched = true;
    }
}
/**
 * Runs schedule() and shields the server from unexpected errors: a
 * SerializationException re-queues the not-yet-processed resource requests and
 * is rethrown (the transaction can be retried); any other Throwable is traced
 * with its stack and aborts the server via System.exit(1).
 */
protected void scheduleProtected(SystemEnvironment sysEnv)
throws SDMSException
{
try {
schedule(sysEnv);
} catch (Throwable e) {
if (e instanceof SerializationException) {
// put the unprocessed requests back so the retry sees them again
if (actualRequestList != null) {
addToRequestList(actualRequestList);
}
throw e;
} else {
StringWriter stackTrace = new StringWriter();
e.printStackTrace(new PrintWriter(stackTrace));
doTrace(sysEnv.cEnv, "Schedule threw an exception; server will abort " + e.toString() + ':' + e.getMessage() + "\n" + stackTrace.toString(), SEVERITY_FATAL);
System.exit(1);
}
}
actualRequestList = null;
}
/**
 * Main scheduling pass: processes queued resource requests, performs a full
 * reschedule when flagged, then runs synchronizing- and system-resource
 * scheduling and publishes the resulting lock list (publl). The expensive
 * part is skipped when nothing changed, no timeout is due and the last pass
 * is less than 10 seconds old.
 */
private void schedule(SystemEnvironment sysEnv)
throws SDMSException
{
dts = new java.util.Date();
timer = dts.getTime();
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
processRequestList(sysEnv);
if(needReSched) {
doTrace(cEnv, "==============> Start Resource Rescheduling <=================\nStartTime = 0", SEVERITY_MESSAGE);
SDMSnpJobFootprintTable.table.clearTableUnlocked(sysEnv);
reschedule(sysEnv);
doTrace(cEnv, "==============> End Resource Rescheduling <=================\nEndTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
needSched = true;
}
if(!needSched) {
// nothing new; only continue if a request timeout is due or the last pass is >10s old
long ts = dts.getTime() - timeoutWakeup;
if((ts < 0) && (timer < lastSchedule + 10000 )) {
return;
}
}
lastSchedule = timer;
Locklist resourceChain = new Locklist();
doTrace(cEnv, "---------------> Start Synchronize Resource Scheduling <-------------------\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
needSched = false;
syncSchedule(sysEnv, resourceChain);
doTrace(cEnv, "---------------> Start System Resource Scheduling <-------------------\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
resourceSchedule(sysEnv, resourceChain);
// publish the freshly computed lock list for isBlocked() readers
synchronized(lock) {
publl = resourceChain;
}
doTrace(cEnv, "---------------> Start Cleanup LifeTables <-------------------\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
long purgeLow = sysEnv.roTxList.first(sysEnv);
doTrace(cEnv, "purgeLow = " + purgeLow, SEVERITY_MESSAGE);
doTrace(cEnv, "purgeSetSize = " + sysEnv.nvPurgeSet.size(), SEVERITY_MESSAGE);
sysEnv.nvPurgeSet.purge(sysEnv, purgeLow);
doTrace(cEnv, "---------------> End Resource Scheduling <-------------------\nEndTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
}
/**
 * Runs a protected scheduling pass with administrator group privileges
 * temporarily pushed onto the connection environment. Returns false while a
 * full reschedule is pending (checked again under the exclusive lock).
 */
public boolean getNextJobSchedule(SystemEnvironment sysEnv)
throws SDMSException
{
if(needReSched)
return false;
if (sysEnv.maxWriter > 1) {
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
// re-test after acquiring the lock; another writer may have set the flag
if(needReSched)
return false;
}
HashSet myGroups = new HashSet();
myGroups.add(SDMSObject.adminGId);
sysEnv.cEnv.pushGid(sysEnv, myGroups);
sysEnv.cEnv.setUser();
try {
scheduleProtected(sysEnv);
} finally {
// always restore the job server identity, even on failure
sysEnv.cEnv.popGid(sysEnv);
sysEnv.cEnv.setJobServer();
}
return true;
}
/**
 * Runs a protected scheduling pass on behalf of pool handling. Returns false
 * while a full reschedule is pending; the flag is re-tested after the
 * exclusive lock has been acquired.
 */
public boolean getPoolSchedule(SystemEnvironment sysEnv)
    throws SDMSException
{
    boolean rescheduleRequired = needReSched;
    if (!rescheduleRequired && sysEnv.maxWriter > 1) {
        LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
        rescheduleRequired = needReSched;
    }
    if (rescheduleRequired) {
        return false;
    }
    scheduleProtected(sysEnv);
    return true;
}
/**
 * Full resource rescheduling: rebuilds requests, runnable queues and
 * reservations for all jobs in RUNNABLE, RESOURCE_WAIT, SYNCHRONIZE_WAIT and
 * DEPENDENCY_WAIT state, in that order.
 */
private void reschedule(SystemEnvironment sysEnv)
throws SDMSException
{
pc.setNow();
needReSched = false;
Vector sv = SDMSScopeTable.idx_type.getVectorForUpdate(sysEnv, new Integer(SDMSScope.SERVER));
Vector rjv = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.RUNNABLE), null, Integer.MAX_VALUE);
doTrace(cEnv, "Number of Runnable Jobs found: " + rjv.size(), SEVERITY_MESSAGE);
doTrace(cEnv, "==============> Rescheduling Runnables <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
rescheduleVector(sysEnv, rjv, sv, SDMSSubmittedEntity.RUNNABLE);
doTrace(cEnv, "==============> Rescheduling Resource Wait <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
Vector smev = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT), null, Integer.MAX_VALUE);
doTrace(cEnv, "Number of Jobs in Resource Wait found: " + smev.size(), SEVERITY_MESSAGE);
rescheduleVector(sysEnv, smev, sv, SDMSSubmittedEntity.RESOURCE_WAIT);
doTrace(cEnv, "==============> Rescheduling Synchronize Wait <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
smev = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.SYNCHRONIZE_WAIT), null, Integer.MAX_VALUE);
doTrace(cEnv, "Number of Jobs in Synchronize Wait found: " + smev.size(), SEVERITY_MESSAGE);
rescheduleVector(sysEnv, smev, sv, SDMSSubmittedEntity.SYNCHRONIZE_WAIT);
doTrace(cEnv, "==============> Rescheduling Dependency Wait <=================\nStartTime = " + (dts.getTime() - timer), SEVERITY_MESSAGE);
// NOTE(review): this stage uses getVector() while the stages above use
// getVectorForUpdate(); confirm whether that is intentional
smev = SDMSSubmittedEntityTable.idx_state.getVector(sysEnv, new Integer(SDMSSubmittedEntity.DEPENDENCY_WAIT), null, Integer.MAX_VALUE);
doTrace(cEnv, "Number of Jobs in Dependency Wait found: " + smev.size(), SEVERITY_MESSAGE);
rescheduleVector(sysEnv, smev, sv, SDMSSubmittedEntity.DEPENDENCY_WAIT);
}
/**
 * Re-creates resource requests and reservations for the given jobs, sorted by
 * dynamic priority. maxState controls how far each job is driven through the
 * request / synchronize / resource scheduling stages.
 */
private void rescheduleVector(SystemEnvironment sysEnv, Vector smev, Vector sv, int maxState)
throws SDMSException
{
SDMSSubmittedEntity sme;
SDMSSchedulingEntity se;
SDMSResourceAllocation ra;
Long smeId;
boolean suspended;
Locklist ll = new Locklist();
long actVersion;
Collections.sort(smev, pc);
for(int i = 0; i < smev.size(); i++) {
sme = (SDMSSubmittedEntity) smev.get(i);
actVersion = sme.getSeVersion(sysEnv).longValue();
se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
if(se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB) continue;
smeId = sme.getId(sysEnv);
if(sme.getIsSuspended(sysEnv).intValue() != SDMSSubmittedEntity.NOSUSPEND || sme.getParentSuspended(sysEnv).intValue() > 0)
suspended = true;
else
suspended = false;
// drop stale runnable-queue entries for this job
Vector v = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
for(int j = 0; j < v.size(); j++) {
((SDMSRunnableQueue) v.get(j)).delete(sysEnv);
}
// drop all allocation entries except real allocations, ignores and sticky ones
v = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
for(int j = 0; j < v.size(); j++) {
ra = (SDMSResourceAllocation) v.get(j);
int allocType = ra.getAllocationType(sysEnv).intValue();
if(allocType != SDMSResourceAllocation.ALLOCATION &&
allocType != SDMSResourceAllocation.IGNORE &&
!ra.getIsSticky(sysEnv).booleanValue()) {
ra.delete(sysEnv, false, true);
}
}
requestSyncSme(sysEnv, sme, SDMSSubmittedEntity.DEPENDENCY_WAIT);
if(sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.ERROR)
continue;
reevaluateJSAssignment(sysEnv, sme);
if(sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.ERROR)
continue;
if(maxState == SDMSSubmittedEntity.RUNNABLE ||
maxState == SDMSSubmittedEntity.RESOURCE_WAIT ||
maxState == SDMSSubmittedEntity.SYNCHRONIZE_WAIT) {
requestSysSme(sysEnv, sme);
}
if((maxState == SDMSSubmittedEntity.RUNNABLE ||
maxState == SDMSSubmittedEntity.RESOURCE_WAIT) ||
(maxState == SDMSSubmittedEntity.SYNCHRONIZE_WAIT && !suspended))
syncScheduleSme(sysEnv, sme, ll);
if(maxState == SDMSSubmittedEntity.RUNNABLE ||
(sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.RESOURCE_WAIT && !suspended))
resourceScheduleSme(sysEnv, sme, ll);
}
}
/**
 * Synchronizing-resource scheduling: tries to reserve synchronizing resources
 * for every non-suspended job in SYNCHRONIZE_WAIT whose requests are already
 * processed, best (dynamic) priority first.
 */
public void syncSchedule(SystemEnvironment sysEnv, Locklist resourceChain)
throws SDMSException
{
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
SDMSSubmittedEntity sme;
int i;
Vector smev = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.SYNCHRONIZE_WAIT), null, Integer.MAX_VALUE);
Vector sv = SDMSScopeTable.idx_type.getVector(sysEnv, new Integer(SDMSScope.SERVER));
doTrace(cEnv, "Number of Job Server : " + sv.size(), SEVERITY_DEBUG);
doTrace(cEnv, "Number of Jobs in SYNCHRONIZE_WAIT : " + smev.size(), SEVERITY_DEBUG);
if(sv.size() == 0) {
// without any job server nothing can be scheduled
return;
}
timeoutWakeup = Long.MAX_VALUE;
pc.setNow();
Collections.sort(smev, pc);
for(i = 0; i < smev.size(); ++i) {
sme = (SDMSSubmittedEntity) smev.get(i);
// skip suspended jobs and jobs whose requests are still pending (oldState set)
if(sme.getIsSuspended(sysEnv).intValue() != SDMSSubmittedEntity.NOSUSPEND ||
sme.getParentSuspended(sysEnv).intValue() > 0 ||
sme.getOldState(sysEnv) != null)
continue;
syncScheduleSme(sysEnv, sme, resourceChain);
}
}
/**
 * Requests the synchronizing resources of a job on all job servers that can
 * provide its environment, then clears the job's recorded old state.
 * Non-JOB scheduling entities are ignored.
 */
public void requestSyncSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int oldState)
    throws SDMSException
{
    if (sysEnv.maxWriter > 1)
        LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
    final long version = sme.getSeVersion(sysEnv).longValue();
    final SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), version);
    if (se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB)
        return;
    final Vector serverList = getServerList(sysEnv, sme, se, version);
    requestResourceSme(sysEnv, sme, se, serverList, SDMSNamedResource.SYNCHRONIZING, version, oldState);
    sme.setOldState(sysEnv, null);
}
/**
 * Requests the system resources of a job on all relevant job servers, then
 * clears the job's recorded old state. Non-JOB scheduling entities are ignored.
 */
public void requestSysSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
    throws SDMSException
{
    if (sysEnv.maxWriter > 1)
        LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
    final long version = sme.getSeVersion(sysEnv).longValue();
    final SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), version);
    if (se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB)
        return;
    final Vector serverList = findRelevantJobserver(sysEnv, sme);
    requestResourceSme(sysEnv, sme, se, serverList, SDMSNamedResource.SYSTEM, version, SDMSSubmittedEntity.SYNCHRONIZE_WAIT);
    sme.setOldState(sysEnv, null);
}
/**
 * Returns the job servers able to run the given job: all registered SERVER
 * scopes whose footprint contains every resource of the job's environment in
 * online state. Results are cached per environment id and validity interval
 * in sysEnv.tx.envJSMap; cache hits/misses are counted in envhit/envmiss.
 */
private Vector getServerList(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, long actVersion)
throws SDMSException
{
Vector cacheEntry;
Vector result = null;
Long validFrom;
Long validTo;
if (sysEnv.tx.envJSMap == null)
sysEnv.tx.envJSMap = new HashMap();
HashMap envJSMap = sysEnv.tx.envJSMap;
Long envId = se.getNeId(sysEnv);
cacheEntry = (Vector) envJSMap.get(envId);
if (cacheEntry != null) {
// cacheEntry holds (validFrom, validTo, serverList) triples; pick the one covering actVersion
for (int i = 0; i < cacheEntry.size(); ++i) {
Vector v = (Vector) cacheEntry.get(i);
validFrom = (Long) v.get(0);
validTo = (Long) v.get(1);
if (validFrom.longValue() < actVersion && validTo.longValue() >= actVersion) {
result = (Vector) v.get(2);
++envhit;
break;
}
}
}
if (cacheEntry == null || result == null) {
++envmiss;
Vector envv = SDMSEnvironmentTable.idx_neId.getVector(sysEnv, envId, actVersion);
SDMSNamedEnvironment ne = SDMSNamedEnvironmentTable.getObject(sysEnv, envId, actVersion);
validFrom = new Long(ne.getValidFrom(sysEnv));
validTo = new Long(ne.getValidTo(sysEnv));
// start from all servers and drop those that are unregistered or miss a resource
result = SDMSScopeTable.idx_type.getVectorForUpdate(sysEnv, new Integer(SDMSScope.SERVER));
Iterator i = result.iterator();
while (i.hasNext()) {
SDMSScope s = (SDMSScope) i.next();
if (!s.getIsRegistered(sysEnv).booleanValue()) {
i.remove();
continue;
}
Long sId = s.getId(sysEnv);
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUniqueForUpdate(sysEnv, sId);
HashMap sfp = npsfp.getFp(sysEnv);
for (int j = 0; j < envv.size(); ++j) {
SDMSEnvironment env = (SDMSEnvironment) envv.get(j);
Long nrId = env.getNrId(sysEnv);
if(!sfp.containsKey(nrId)) {
i.remove();
break;
}
SDMSResource r = SDMSResourceTable.getObjectForUpdate(sysEnv, (Long) sfp.get(nrId));
if(!r.getIsOnline(sysEnv).booleanValue()) {
i.remove();
break;
}
}
}
// store the freshly computed list in the cache
Vector v = new Vector();
v.add(validFrom);
v.add(validTo);
v.add(result);
if (cacheEntry == null) cacheEntry = new Vector();
cacheEntry.add(v);
envJSMap.put(envId, cacheEntry);
}
return result;
}
/**
 * Presumably invoked when a job starts on scope s: converts its RESERVATIONs
 * into ALLOCATIONs where the reserved resource is a local/folder resource or
 * belongs to this scope's footprint, deletes every entry that did not become
 * an ALLOCATION, and flags a new scheduling pass.
 */
public static void allocateAndReleaseResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSScope s)
throws SDMSException
{
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, sysEnv.sched, ObjectLock.EXCLUSIVE);
Long rId;
Long nrId;
Long srId;
SDMSResource r, sr;
SDMSResourceAllocation ra;
SDMSnpJobFootprint npjfp;
try {
npjfp = SDMSnpJobFootprintTable.idx_smeId_getUnique(sysEnv, sme.getId(sysEnv));
} catch (NotFoundException nfe) {
// footprint not cached yet; compute it and retry
getJobFootprint(sysEnv, sme);
npjfp = SDMSnpJobFootprintTable.idx_smeId_getUnique(sysEnv, sme.getId(sysEnv));
}
HashMap fpLocal = npjfp.getFpLocal(sysEnv);
HashMap fpFolder = npjfp.getFpFolder(sysEnv);
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
HashMap sfp = npsfp.getFp(sysEnv);
Vector v = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, sme.getId(sysEnv));
for(int i = 0; i < v.size(); i++) {
ra = (SDMSResourceAllocation) v.get(i);
rId = ra.getRId(sysEnv);
r = SDMSResourceTable.getObjectForUpdate(sysEnv, rId);
nrId = r.getNrId(sysEnv);
if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.RESERVATION) {
if(fpFolder.containsKey(nrId) || fpLocal.containsKey(nrId)) {
ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.ALLOCATION));
} else {
srId = (Long) sfp.get(nrId);
sr = SDMSResourceTable.getObjectForUpdate(sysEnv, srId);
if(sr.getId(sysEnv).equals(rId)) {
ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.ALLOCATION));
}
}
if (ra.getIsSticky(sysEnv).booleanValue()) {
// NOTE(review): intentionally empty? sticky reservations receive no
// special handling here -- confirm
}
}
// anything that did not become an ALLOCATION is released
if (ra.getAllocationType(sysEnv).intValue() != SDMSResourceAllocation.ALLOCATION) ra.delete(sysEnv, true, true);
}
SystemEnvironment.sched.needSched = true;
}
/**
 * Creates resource requests of the given usage type (SYNCHRONIZING or SYSTEM)
 * for a job on every candidate server in sv whose footprint satisfies the
 * job's requirements, and records the job in the runnable queue per fitting
 * scope (each attempt in its own sub-transaction). Sticky master requests
 * collected in masterMap are cleaned up afterwards. If no scope fits, the job
 * is set to ERROR and verboseFits() traces the reason per server.
 */
private void requestResourceSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, Vector sv, int type, long actVersion, int oldState)
throws SDMSException
{
SDMSScope s;
HashMap masterMap = new HashMap();
Long smeId = sme.getId(sysEnv);
Integer smeState = sme.getState(sysEnv);
boolean fitsSomewhere = false;
HashMap smefp = (HashMap) getJobFootprint(sysEnv, sme).get(FP_SCOPE);
if(sme.getState(sysEnv).intValue() == SDMSSubmittedEntity.ERROR)
return;
// narrow the server candidates by kept and sticky resources if applicable
Vector jsv = new Vector();
Vector v;
if(checkKeptResources(sysEnv, smeId, jsv, sv)) v = jsv;
else v = sv;
jsv = new Vector();
if (checkStickyResources(sysEnv, smeId, smefp, jsv, v)) v = jsv;
try {
requestLocalResourceSme(sysEnv, sme, type, masterMap, oldState);
requestFolderResourceSme(sysEnv, sme, type, masterMap, oldState);
} catch (SDMSEscape e) {
// sticky resolution failed: set the job to error and suspend its master
sme.setToError(sysEnv, "Sticky Resource resolution conflict (resource to allocate not visible)");
Long tMasterId = sme.getMasterId(sysEnv);
SDMSSubmittedEntity tMsme = SDMSSubmittedEntityTable.getObject(sysEnv, tMasterId);
tMsme.suspend(sysEnv, false, false);
}
for(int j = 0; j < v.size(); ++j) {
s = (SDMSScope) v.get(j);
if (!s.getIsRegistered(sysEnv).booleanValue()) continue;
Long sId = s.getId(sysEnv);
if(!s.canExecute(sysEnv, sme))
continue;
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
HashMap sfp = npsfp.getFp(sysEnv);
// each server attempt runs in its own sub-transaction so a failed fit can be rolled back
sysEnv.tx.beginSubTransaction(sysEnv);
try {
if(fits(sysEnv, sfp, smefp, sme, false, null)) {
requestResources(sysEnv, sme, se, actVersion, sfp, type, smefp, masterMap, oldState);
try {
SDMSRunnableQueue rq = SDMSRunnableQueueTable.idx_smeId_scopeId_getUniqueForUpdate(sysEnv, new SDMSKey(smeId, sId));
rq.setState(sysEnv, smeState);
} catch (NotFoundException nfe) {
SDMSRunnableQueueTable.table.create(sysEnv, smeId, sId, smeState);
}
fitsSomewhere = true;
}
} catch (SDMSEscape e) {
sysEnv.tx.rollbackSubTransaction(sysEnv);
continue;
} catch (Exception e) {
doTrace(cEnv, ": Job " + smeId + " run into an Exception during Resource Scheduling : " + e.toString(), SEVERITY_WARNING);
sysEnv.tx.rollbackSubTransaction(sysEnv);
throw e;
}
sysEnv.tx.commitSubTransaction(sysEnv);
}
if (type == SDMSNamedResource.SYNCHRONIZING) {
// clean up sticky groups whose master request was never matched by a server
Iterator it = masterMap.values().iterator();
while (it.hasNext()) {
Vector rabv = (Vector) it.next();
for (int vi = 0; vi < rabv.size(); ++vi) {
Vector rab = (Vector) rabv.get(vi);
if (((Boolean) rab.get(1)).booleanValue())
continue;
((SDMSResourceAllocation) rab.get(0)).cleanupStickyGroup(sysEnv);
}
}
}
if(!fitsSomewhere) {
sme.setToError(sysEnv, "Job cannot run in any scope because of resource shortage");
// trace per server why the job does not fit
for (int j = 0; j < v.size(); ++j) {
s = (SDMSScope) v.get(j);
Long sId = s.getId(sysEnv);
if (!s.getIsRegistered(sysEnv).booleanValue()) {
continue;
}
if(!s.canExecute(sysEnv, sme)) {
continue;
}
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
HashMap sfp = npsfp.getFp(sysEnv);
verboseFits(sysEnv, sfp, smefp, sme, false, null);
}
}
}
/**
 * Re-checks which of a job's previously recorded candidate servers can still
 * run it. Runnable-queue entries (and the matching resource allocations) of
 * servers that no longer fit are removed; if none remains, the job is set to
 * ERROR. Non-JOB scheduling entities are ignored.
 */
public void reevaluateJSAssignment(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
throws SDMSException
{
if (sysEnv.maxWriter > 1)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
SDMSScope s;
Long smeId = sme.getId(sysEnv);
boolean fitsSomewhere = false;
HashMap smefp = (HashMap) getJobFootprint(sysEnv, sme).get(FP_SCOPE);
doTrace(cEnv, ": Job " + sme.getId(sysEnv) + " is re-evaluated", SEVERITY_DEBUG);
long actVersion = sme.getSeVersion(sysEnv).longValue();
SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
if(se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB) return;
// collect the candidate servers from the job's runnable-queue entries
final Vector rqv = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
final Vector sv = new Vector();
for(int j = 0; j < rqv.size(); j++) {
SDMSRunnableQueue rq = (SDMSRunnableQueue) rqv.get(j);
s = SDMSScopeTable.getObject(sysEnv, rq.getScopeId(sysEnv));
sv.add(s);
doTrace(cEnv, ": added scope id " + s.getId(sysEnv), SEVERITY_DEBUG);
}
doTrace(cEnv, ": we found " + sv.size() + " potential servers", SEVERITY_DEBUG);
for(int j = 0; j < sv.size(); ++j) {
s = (SDMSScope) sv.get(j);
Long sId = s.getId(sysEnv);
doTrace(cEnv, ": testing server " + sId, SEVERITY_DEBUG);
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
HashMap sfp = npsfp.getFp(sysEnv);
if(s.getIsRegistered(sysEnv).booleanValue() && fits(sysEnv, sfp, smefp, sme, true, s)) {
doTrace(cEnv, ": seems to fit *****************", SEVERITY_DEBUG);
fitsSomewhere = true;
} else {
// server no longer fits: remove its queue entry and resource allocations
doTrace(cEnv, ": doesn't seem to fit -+-+-+-+-+-+-+-+-", SEVERITY_DEBUG);
doTrace(cEnv, ": deleting [" + s.getId(sysEnv) + ", " + smeId + "]", SEVERITY_DEBUG);
try {
SDMSRunnableQueue rq = SDMSRunnableQueueTable.idx_smeId_scopeId_getUnique(sysEnv, new SDMSKey(smeId, sId));
rq.delete(sysEnv);
} catch (NotFoundException nfe) {
}
Iterator i = smefp.keySet().iterator();
while(i.hasNext()) {
Long L = (Long) i.next();
SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) sfp.get(L));
Long rId = r.getId(sysEnv);
final Vector rav = SDMSResourceAllocationTable.idx_smeId_nrId.getVector(sysEnv, new SDMSKey(smeId, r.getNrId(sysEnv)));
for (int k = 0; k < rav.size(); ++k) {
SDMSResourceAllocation ra = (SDMSResourceAllocation) rav.get(k);
if (ra.getRId(sysEnv).equals(rId)) {
ra.delete(sysEnv, true, false);
break;
}
}
}
}
}
if(!fitsSomewhere) {
sme.setToError(sysEnv, "Job cannot run in any scope because of resource shortage");
}
}
/**
 * Requests the job's folder-level resources of the given usage type; a missing
 * footprint is reported as a CommonErrorException.
 */
private void requestFolderResourceSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int type, HashMap masterMap, int oldState)
    throws SDMSException
{
    final Long smeId = sme.getId(sysEnv);
    try {
        final SDMSnpJobFootprint jfp = SDMSnpJobFootprintTable.idx_smeId_getUnique(sysEnv, smeId);
        final HashMap folderFootprint = jfp.getFpFolder(sysEnv);
        requestLocalFolderResources(sysEnv, sme, smeId, folderFootprint, type, masterMap, oldState);
    } catch (NotFoundException nfe) {
        throw new CommonErrorException(new SDMSMessage(sysEnv, "03501142150", "No footprint found for job $1", smeId));
    }
}
/**
 * Requests the job's local resources of the given usage type; a missing
 * footprint is reported as a CommonErrorException.
 */
private void requestLocalResourceSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int type, HashMap masterMap, int oldState)
    throws SDMSException
{
    final Long smeId = sme.getId(sysEnv);
    try {
        final SDMSnpJobFootprint jfp = SDMSnpJobFootprintTable.idx_smeId_getUniqueForUpdate(sysEnv, smeId);
        final HashMap localFootprint = jfp.getFpLocal(sysEnv);
        requestLocalFolderResources(sysEnv, sme, smeId, localFootprint, type, masterMap, oldState);
    } catch (NotFoundException nfe) {
        throw new CommonErrorException(new SDMSMessage(sysEnv, "03501142151", "No footprint found for job $1", smeId));
    }
}
/**
 * Creates requests for every (requirement, resource) pair in the given
 * footprint whose named resource matches the requested usage type. Sticky
 * requirements are only requested when the job just entered from SUBMITTED.
 */
private void requestLocalFolderResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Long smeId, HashMap fp, int type, HashMap masterMap, int oldState)
    throws SDMSException
{
    for (Object o : fp.values()) {
        final Vector pair = (Vector) o;
        final SDMSResourceRequirement rr = (SDMSResourceRequirement) pair.get(0);
        final SDMSResource r = (SDMSResource) pair.get(1);
        final Long nrId = rr.getNrId(sysEnv);
        final SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
        if (nr.getUsage(sysEnv).intValue() != type)
            continue;
        if (rr.getIsSticky(sysEnv).booleanValue() && oldState != SDMSSubmittedEntity.SUBMITTED)
            continue;
        createRequest(sysEnv, smeId, rr, r, nrId, type, masterMap);
    }
}
/**
 * Creates requests for every scope-level resource requirement in the job's
 * footprint that matches the requested usage type, resolving each requirement
 * to the concrete resource of the server footprint sfp. Sticky requirements
 * are only requested when the job just entered from SUBMITTED.
 */
private void requestResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se,
                              long actVersion, HashMap sfp, int type, HashMap smefp, HashMap masterMap, int oldState)
    throws SDMSException
{
    final Long smeId = sme.getId(sysEnv);
    for (Object entry : smefp.values()) {
        // the footprint may contain other object kinds; only requirements matter here
        if (!(entry instanceof SDMSResourceRequirement))
            continue;
        final SDMSResourceRequirement rr = (SDMSResourceRequirement) entry;
        if (rr.getIsSticky(sysEnv).booleanValue() && oldState != SDMSSubmittedEntity.SUBMITTED)
            continue;
        final Long nrId = rr.getNrId(sysEnv);
        final SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
        if (nr.getUsage(sysEnv).intValue() != type)
            continue;
        final SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) sfp.get(nrId));
        createRequest(sysEnv, smeId, rr, r, nrId, type, masterMap);
    }
}
/**
 * Returns whether folderId lies on the folder path of the job's scheduling
 * entity, i.e. whether the folder is an ancestor (or the direct parent folder)
 * of the job definition.
 */
private boolean isVisible(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Long folderId)
    throws SDMSException
{
    final Long seVersion = sme.getSeVersion(sysEnv);
    final SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), seVersion);
    // walk up the folder hierarchy starting at the job's own folder
    for (Long fId = se.getFolderId(sysEnv); fId != null; ) {
        if (fId.equals(folderId))
            return true;
        final SDMSFolder f = SDMSFolderTable.getObject(sysEnv, fId, seVersion);
        fId = f.getParentId(sysEnv);
    }
    return false;
}
private void createRequest(SystemEnvironment sysEnv, Long smeId, SDMSResourceRequirement rr, SDMSResource r, Long nrId, int type, HashMap masterMap)
throws SDMSException
{
Integer lock;
Long rsmpId;
Long rId;
float factor = 1;
Long stickyParentId = null;
if(rr == null) return;
rId = r.getId(sysEnv);
try {
SDMSResourceAllocation ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUniqueForUpdate(
sysEnv, new SDMSKey(smeId, rId, rr.getStickyName(sysEnv)));
ra.setRefcount(sysEnv, new Integer(ra.getRefcount(sysEnv).intValue() + 1));
return;
} catch (NotFoundException nfe) {
}
if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, rr.getStickyName(sysEnv)))) return;
if(type == SDMSNamedResource.SYNCHRONIZING) {
lock = rr.getLockmode(sysEnv);
if(lock == null) lock = new Integer(Lockmode.N);
rsmpId = rr.getRsmpId(sysEnv);
Long stickyParentSeId = rr.getStickyParent(sysEnv);
if (rr.getIsSticky(sysEnv).booleanValue()) {
SDMSSubmittedEntity psme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
stickyParentId = psme.getMasterId(sysEnv);
if (stickyParentSeId != null) {
Long pId = psme.getParentId(sysEnv);
while (pId != null && !stickyParentSeId.equals(psme.getSeId(sysEnv))) {
psme = SDMSSubmittedEntityTable.getObject(sysEnv, pId);
pId = psme.getParentId(sysEnv);
}
if (pId != null || stickyParentSeId.equals(psme.getSeId(sysEnv))) stickyParentId = psme.getId(sysEnv);
} else {
Long sId = r.getScopeId(sysEnv);
if (!SDMSScopeTable.table.exists(sysEnv, sId)) {
if (SDMSFolderTable.table.exists(sysEnv, sId)) {
Long stickyParentCandidate = smeId;
SDMSSubmittedEntity sme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
Long parentId = sme.getParentId(sysEnv);
while (parentId != null) {
sme = SDMSSubmittedEntityTable.getObject(sysEnv, parentId);
if (isVisible(sysEnv, sme, sId))
stickyParentCandidate = parentId;
parentId = sme.getParentId(sysEnv);
}
stickyParentId = stickyParentCandidate;
} else {
stickyParentId = sId;
}
}
}
}
} else {
lock = new Integer(Lockmode.N);
rsmpId = null;
}
Integer reqAmount = new Integer((int) Math.ceil(rr.getAmount(sysEnv).intValue() * factor));
if (rr.getIsSticky(sysEnv).booleanValue()) {
String stickyName = rr.getStickyName(sysEnv);
Long nStickyParentId = new Long(- stickyParentId.longValue());
SDMSKey masterKey = new SDMSKey(nStickyParentId, stickyName, nrId);
Vector ravok = (Vector) masterMap.get(masterKey);
if (ravok == null) {
Vector rav = SDMSResourceAllocationTable.idx_smeId_nrId.getVector(sysEnv, new SDMSKey(nStickyParentId, nrId));
ravok = new Vector();
for (int i = 0; i < rav.size(); ++i) {
SDMSResourceAllocation ra = (SDMSResourceAllocation) rav.get(i);
String raName = ra.getStickyName(sysEnv);
if ((raName == null && stickyName == null) || (raName != null && raName.equals(stickyName))) {
Vector entry = new Vector();
entry.add(ra);
entry.add(Boolean.FALSE);
ravok.add(entry);
}
}
masterMap.put(masterKey, ravok);
}
if (ravok.size() != 0) {
boolean raOK = false;
for (int i = 0; i < ravok.size(); ++i) {
Vector entry = (Vector) ravok.get(i);
SDMSResourceAllocation ra = (SDMSResourceAllocation) entry.get(0);
if (ra.getRId(sysEnv).equals(rId)) {
raOK = true;
entry.set(1, Boolean.TRUE);
int raReqAmount = ra.getOrigAmount(sysEnv).intValue();
int raLockMode = ra.getLockmode(sysEnv).intValue();
if (raReqAmount < reqAmount.intValue()) {
if (ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.MASTER_RESERVATION) {
SDMSSubmittedEntity sme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv,
sme.getSeId(sysEnv), sme.getSeVersion(sysEnv));
throw new CommonErrorException(new SDMSMessage(sysEnv, "03405261410",
"Invalid amount escalation for already reserved sticky resource $1, job definition $2",
rId, se.pathString(sysEnv)));
}
ra.setOrigAmount(sysEnv, reqAmount);
ra.setAmount(sysEnv, reqAmount);
}
raLockMode &= lock.intValue();
if (raLockMode != ra.getLockmode(sysEnv).intValue()) {
if (ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.MASTER_RESERVATION) {
SDMSSubmittedEntity sme = SDMSSubmittedEntityTable.getObject(sysEnv, smeId);
SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv,
sme.getSeId(sysEnv), sme.getSeVersion(sysEnv));
throw new CommonErrorException(new SDMSMessage(sysEnv, "03405261415",
"Invalid lock escalation for already reserved sticky resource $1, job definition $2",
rId, se.pathString(sysEnv)));
}
ra.setLockmode(sysEnv, new Integer(raLockMode));
}
int refCount = ra.getRefcount(sysEnv).intValue();
ra.setRefcount(sysEnv, new Integer(refCount + 1));
break;
}
}
if (!raOK) {
throw new SDMSEscape();
}
} else {
try {
SDMSResourceAllocationTable.table.create(sysEnv,
rId, nStickyParentId, nrId,
reqAmount,
reqAmount,
rr.getKeepMode(sysEnv),
rr.getIsSticky(sysEnv),
rr.getStickyName(sysEnv),
stickyParentId,
new Integer(SDMSResourceAllocation.MASTER_REQUEST),
null,
lock,
ONE);
} catch (DuplicateKeyException dke) {
}
}
}
SDMSResourceAllocationTable.table.create(sysEnv,
rId, smeId, nrId,
reqAmount,
reqAmount,
rr.getKeepMode(sysEnv),
rr.getIsSticky(sysEnv),
rr.getStickyName(sysEnv),
stickyParentId,
new Integer(SDMSResourceAllocation.REQUEST),
rsmpId,
lock,
ONE);
}
private Vector findRelevantJobserver (SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
throws SDMSException
{
	// Collects the scopes (jobservers) referenced by all runnable queue
	// entries of the given job, i.e. the servers the job is queued on.
	final Vector scopes = new Vector();
	final Vector queueEntries = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, sme.getId(sysEnv));
	for (Iterator it = queueEntries.iterator(); it.hasNext(); ) {
		final SDMSRunnableQueue entry = (SDMSRunnableQueue) it.next();
		scopes.add(SDMSScopeTable.getObject(sysEnv, entry.getScopeId(sysEnv)));
	}
	return scopes;
}
// Synchronizing-resource scheduling for a single job (SYNCHRONIZE_WAIT phase).
// 1) Reserves folder- and job-local synchronizing resources; if that fails,
//    only a timeout check is performed.
// 2) For every jobserver the job is queued on, tries to reserve the scope
//    level synchronizing resources; successful queue entries are advanced to
//    RESOURCE_WAIT.
// 3) If at least one server succeeded, the satisfied synchronizing REQUEST
//    allocations and the queue entries of the failed servers are removed and
//    the job itself is moved to RESOURCE_WAIT.
private void syncScheduleSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Locklist resourceChain)
throws SDMSException
{
Vector rqv;
SDMSScope s;
Long sId;
Long smeId = sme.getId(sysEnv);
boolean resourcesReserved = false;
// Footprint split into scope / folder / local parts (see getJobFootprint()).
Vector smefpv = getJobFootprint(sysEnv, sme);
HashMap smefp = (HashMap) smefpv.get(FP_SCOPE);
Vector jsv = new Vector();
Vector v;
long actVersion = sme.getSeVersion(sysEnv).longValue();
SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
v = findRelevantJobserver(sysEnv, sme);
final HashMap fpFolder = (HashMap) smefpv.get(FP_FOLDER);
final HashMap fpLocal = (HashMap) smefpv.get(FP_LOCAL);
// Both the folder and the job-local synchronizing resources must be reservable.
resourcesReserved = reserveFp(sysEnv, sme, se, resourceChain, fpFolder, SDMSNamedResource.SYNCHRONIZING) &&
reserveFp(sysEnv, sme, se, resourceChain, fpLocal, SDMSNamedResource.SYNCHRONIZING);
if(!resourcesReserved) {
checkTimeout(sysEnv, sme, se, actVersion);
return;
}
resourcesReserved = false;
// Try the scope level synchronizing resources on each candidate server.
for(int j = 0; j < v.size(); ++j) {
s = (SDMSScope) v.get(j);
sId = s.getId(sysEnv);
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, sId);
HashMap sfp = npsfp.getFp(sysEnv);
// The server must provide every static resource of the job (and online).
if(!checkStaticResources(sysEnv, sfp, smefp)) continue;
Iterator rrvi = smefp.values().iterator();
if(reserveSyncResources(sysEnv, sme, se, actVersion, sfp, resourceChain, rrvi)) {
resourcesReserved = true;
SDMSRunnableQueue rq = SDMSRunnableQueueTable.idx_smeId_scopeId_getUniqueForUpdate(sysEnv, new SDMSKey(smeId, sId));
rq.setState(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT));
}
}
if(resourcesReserved) {
// Drop the synchronizing REQUEST allocations; they have been turned into
// reservations by reserveSyncResources().
Vector rv = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
for(int i = 0; i < rv.size(); i++) {
SDMSResourceAllocation ra = (SDMSResourceAllocation) rv.get(i);
if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.REQUEST) {
SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, ra.getNrId(sysEnv));
if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYNCHRONIZING)
continue;
ra.delete(sysEnv, true, true);
}
}
// Remove queue entries of servers that did not advance to RESOURCE_WAIT.
rqv = SDMSRunnableQueueTable.idx_smeId.getVector(sysEnv, smeId);
for(int i = 0; i < rqv.size(); i++) {
SDMSRunnableQueue rq = (SDMSRunnableQueue) rqv.get(i);
if(rq.getState(sysEnv).intValue() != SDMSSubmittedEntity.RESOURCE_WAIT) {
rq.delete(sysEnv);
}
}
sme.setState(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT));
} else {
checkTimeout(sysEnv, sme, se, actVersion);
}
}
// Restricts the candidate jobserver list of a job that still holds (keeps)
// resource allocations: only servers whose footprint contains every
// allocated resource remain candidates.
// jsv (out): on the first ALLOCATION it is filled with all matching scopes
// taken from sv; each further ALLOCATION filters jsv down again.
// Returns true iff at least one ALLOCATION was found (i.e. jsv was populated).
private boolean checkKeptResources(SystemEnvironment sysEnv, Long smeId, Vector jsv, Vector sv)
throws SDMSException
{
SDMSScope s;
Vector rav = SDMSResourceAllocationTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
boolean jsv_used = false;
for(int j = 0; j < rav.size(); j++) {
SDMSResourceAllocation ra = (SDMSResourceAllocation) rav.get(j);
int raAllocationType = ra.getAllocationType(sysEnv).intValue();
if(raAllocationType == SDMSResourceAllocation.ALLOCATION) {
if(!jsv_used) {
// First allocation: seed jsv with every server providing the resource.
for(int i = 0; i < sv.size(); i++) {
s = (SDMSScope) sv.get(i);
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
HashMap sfp = npsfp.getFp(sysEnv);
if(sfp.containsValue(ra.getRId(sysEnv))) {
jsv.addElement(s);
jsv_used = true;
}
}
} else {
// Subsequent allocations: intersect, dropping servers lacking the resource.
Iterator jsi = jsv.iterator();
while(jsi.hasNext()) {
s = (SDMSScope) jsi.next();
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
HashMap sfp = npsfp.getFp(sysEnv);
if(!sfp.containsValue(ra.getRId(sysEnv))) {
jsi.remove();
}
}
}
}
}
return jsv_used;
}
// Determines candidate jobservers with respect to the sticky resource
// allocations of a job. Non-sticky allocations and allocations whose named
// resource is not part of the footprint (smefp) are filtered out first.
// jsv (out): collects every scope from v whose footprint contains one of the
// remaining sticky resources.
// NOTE(review): a scope is added once per matching allocation, so jsv may
// contain duplicates — confirm callers tolerate that.
// Returns false if the job holds no relevant sticky allocation at all.
private boolean checkStickyResources(SystemEnvironment sysEnv, Long smeId, HashMap smefp, Vector jsv, Vector v)
throws SDMSException
{
Vector rav = SDMSResourceAllocationTable.idx_smeId.getVector(sysEnv, smeId);
SDMSResourceAllocation ra;
Iterator ravi = rav.iterator();
while (ravi.hasNext()) {
ra = (SDMSResourceAllocation) ravi.next();
if (!ra.getIsSticky(sysEnv).booleanValue()) {
ravi.remove();
continue;
}
if (!smefp.containsKey(ra.getNrId(sysEnv))) {
ravi.remove();
continue;
}
}
if (rav.size() == 0) return false;
for (int i = 0; i < v.size(); ++i) {
SDMSScope s = (SDMSScope) v.get(i);
SDMSnpSrvrSRFootprint npsfp = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
HashMap sfp = npsfp.getFp(sysEnv);
ravi = rav.iterator();
while (ravi.hasNext()) {
ra = (SDMSResourceAllocation) ravi.next();
if(sfp.containsValue(ra.getRId(sysEnv))) {
jsv.addElement(s);
}
}
}
return true;
}
private void checkTimeout(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, long actVersion)
throws SDMSException
{
	// If the job definition specifies a timeout, checks whether the job has
	// been waiting longer than allowed. On timeout the job is moved to the
	// configured timeout exit state; otherwise the scheduler wakeup time is
	// advanced so the timeout will be reconsidered in time.
	final Integer toBase = se.getTimeoutBase(sysEnv);
	if (toBase == null)
		return;	// no timeout configured for this job definition
	// Convert the timeout amount into milliseconds according to its unit.
	long toTime = se.getTimeoutAmount(sysEnv).longValue();
	final int unit = toBase.intValue();
	if (unit == SDMSInterval.MINUTE)	toTime *= SDMSInterval.MINUTE_DUR;
	else if (unit == SDMSInterval.HOUR)	toTime *= SDMSInterval.HOUR_DUR;
	else if (unit == SDMSInterval.DAY)	toTime *= SDMSInterval.DAY_DUR;
	else if (unit == SDMSInterval.WEEK)	toTime *= SDMSInterval.WEEK_DUR;
	else if (unit == SDMSInterval.MONTH)	toTime *= SDMSInterval.MONTH_DUR;
	else if (unit == SDMSInterval.YEAR)	toTime *= SDMSInterval.YEAR_DUR;
	final long now = new java.util.Date().getTime();
	final long nextTimeout = sme.getSyncTs(sysEnv).longValue() + toTime;
	if (nextTimeout < now) {
		doTrace(cEnv, ": Job " + sme.getId(sysEnv) + " run into timeout", SEVERITY_MESSAGE);
		final Long esdId = se.getTimeoutStateId(sysEnv);
		final Long espId = se.getEspId(sysEnv);
		final SDMSExitState es = SDMSExitStateTable.idx_espId_esdId_getUnique(sysEnv, new SDMSKey(espId, esdId), actVersion);
		sme.changeState(sysEnv, esdId, es, sme.getExitCode(sysEnv), "Timeout", null );
	} else {
		if (nextTimeout < timeoutWakeup)
			timeoutWakeup = nextTimeout;
	}
}
public void resourceSchedule(SystemEnvironment sysEnv, Locklist resourceChain)
throws SDMSException
{
	// Runs resource scheduling for every job currently in RESOURCE_WAIT,
	// ordered by priority; jobs that are suspended (directly or through a
	// suspended parent) are skipped.
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	final Vector waiting = SDMSSubmittedEntityTable.idx_state.getVectorForUpdate(sysEnv, new Integer(SDMSSubmittedEntity.RESOURCE_WAIT), null, Integer.MAX_VALUE);
	doTrace(cEnv, "Number of Jobs in RESOURCE_WAIT : " + waiting.size(), SEVERITY_DEBUG);
	pc.setNow();
	Collections.sort(waiting, pc);
	for (int idx = 0; idx < waiting.size(); ++idx) {
		final SDMSSubmittedEntity sme = (SDMSSubmittedEntity) waiting.get(idx);
		final boolean suspended =
			sme.getIsSuspended(sysEnv).intValue() != SDMSSubmittedEntity.NOSUSPEND ||
			sme.getParentSuspended(sysEnv).intValue() > 0;
		if (!suspended)
			resourceScheduleSme(sysEnv, sme, resourceChain);
	}
}
// System-resource scheduling for a single job (RESOURCE_WAIT phase).
// Reserves the job-local and folder system resources first; then walks the
// job's runnable queue entries until one server can satisfy all scope-level
// system resources. On success the winning queue entry and the job move to
// RUNNABLE, leftover REQUESTs and losing queue entries are removed, and the
// jobserver is notified. On failure only a timeout check is performed.
private void resourceScheduleSme(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, Locklist resourceChain)
throws SDMSException
{
SDMSRunnableQueue rq;
boolean resourcesReserved = false;
Long smeId = sme.getId(sysEnv);
long actVersion = sme.getSeVersion(sysEnv).longValue();
SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), actVersion);
// Footprint split into scope / folder / local parts (see getJobFootprint()).
final Vector fpv = getJobFootprint(sysEnv, sme);
final HashMap fp = (HashMap) fpv.get(FP_SCOPE);
final HashMap fpFolder = (HashMap) fpv.get(FP_FOLDER);
final HashMap fpLocal = (HashMap) fpv.get(FP_LOCAL);
resourcesReserved = reserveFp(sysEnv, sme, se, resourceChain, fpLocal, SDMSNamedResource.SYSTEM);
if(resourcesReserved)
resourcesReserved = reserveFp(sysEnv, sme, se, resourceChain, fpFolder, SDMSNamedResource.SYSTEM);
if(!resourcesReserved) {
checkTimeout(sysEnv, sme, se, actVersion);
return;
}
resourcesReserved = false;
Vector v = SDMSRunnableQueueTable.idx_smeId.getVectorForUpdate(sysEnv, smeId);
if(v.size() == 0) {
doTrace(cEnv, ": Job " + sme.getId(sysEnv) + " cannot run in any scope", SEVERITY_WARNING);
return;
}
SDMSScope s = null;
// First server whose scope footprint satisfies all system requests wins.
for(int j = 0; j < v.size(); ++j) {
rq = (SDMSRunnableQueue) v.get(j);
s = SDMSScopeTable.getObject(sysEnv, rq.getScopeId(sysEnv));
Iterator it = fp.values().iterator();
HashMap sfp = (SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv))).getFp(sysEnv);
if(reserveSysResources(sysEnv, sme, sfp, resourceChain, it)) {
resourcesReserved = true;
rq.setState(sysEnv, new Integer(SDMSSubmittedEntity.RUNNABLE));
allocateAndReleaseResources(sysEnv, sme, s);
break;
}
}
if(resourcesReserved) {
doTrace(cEnv, ": Job " + smeId + " added to Runnable Queue " + s.getId(sysEnv), SEVERITY_DEBUG);
// Drop now-satisfied system REQUEST allocations.
Vector rv = SDMSResourceAllocationTable.idx_smeId.getVector(sysEnv, smeId);
for(int i = 0; i < rv.size(); i++) {
SDMSResourceAllocation ra = (SDMSResourceAllocation) rv.get(i);
if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.REQUEST) {
SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, ra.getNrId(sysEnv));
if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYSTEM)
continue;
ra.delete(sysEnv, true, true);
}
}
// Remove queue entries of the servers that did not win.
Vector rqv = SDMSRunnableQueueTable.idx_smeId.getVector(sysEnv, smeId);
for(int i = 0; i < rqv.size(); i++) {
rq = (SDMSRunnableQueue) rqv.get(i);
if(rq.getState(sysEnv).intValue() != SDMSSubmittedEntity.RUNNABLE) {
rq.delete(sysEnv);
}
}
sme.setState(sysEnv, new Integer(SDMSSubmittedEntity.RUNNABLE));
sysEnv.notifier.addJobServerToNotify(s.getId(sysEnv));
} else {
checkTimeout(sysEnv, sme, se, actVersion);
}
}
// Tests whether the job footprint smeFp (named resource id -> requirement or
// environment entry) can be satisfied by the scope footprint scopeFp (named
// resource id -> resource id).
// Returns false as soon as one entry cannot be met:
//  - the scope does not provide the named resource at all,
//  - checkCondition is set and the resource condition evaluates to false
//    (or fails to evaluate),
//  - the requested amount exceeds the resource's requestable amount
//    (a null requestable amount means "unlimited").
// NOTE(review): the original built a trace message (and looked up the named
// resource for it) inside the CommonErrorException handler without ever
// using it; that dead code was removed — the handler still answers false.
public static boolean fits(SystemEnvironment sysEnv, HashMap scopeFp, HashMap smeFp, SDMSSubmittedEntity sme, boolean checkCondition, SDMSScope evalScope)
throws SDMSException
{
	if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
		LockingSystem.lock(sysEnv, sysEnv.sched, ObjectLock.EXCLUSIVE);
	Iterator i = smeFp.keySet().iterator();
	while(i.hasNext()) {
		Long L = (Long) i.next();
		if(!scopeFp.containsKey(L)) {
			return false;
		}
		SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) scopeFp.get(L));
		Integer sAmount = r.getRequestableAmount(sysEnv);
		// A footprint entry is either a resource requirement or an environment entry.
		SDMSResourceRequirement rr;
		SDMSEnvironment e;
		SDMSProxy p = (SDMSProxy) smeFp.get(L);
		if (p instanceof SDMSResourceRequirement) {
			rr = (SDMSResourceRequirement) p;
			e = null;
		} else {
			e = (SDMSEnvironment) p;
			rr = null;
		}
		if(checkCondition) {
			String condition = (rr == null ? e.getCondition(sysEnv) : rr.getCondition(sysEnv));
			if (condition != null) {
				final BoolExpr be = new BoolExpr(condition);
				try {
					if (! be.checkCondition(sysEnv, r, sme, null, null, evalScope)) {
						return false;
					}
				} catch (CommonErrorException cee) {
					// A condition that cannot be evaluated is treated as "does not fit".
					return false;
				}
			}
		}
		// Environment entries request no amount; requirements request rr's amount.
		Integer jAmount;
		if(rr == null) jAmount = new Integer(0);
		else jAmount = rr.getAmount(sysEnv);
		if(sAmount == null)
			continue;
		if(jAmount.compareTo(sAmount) > 0) {
			return false;
		}
	}
	return true;
}
// Lock-free variant of fits(): identical footprint matching, but without the
// exclusive scheduler lock taken by fits() in READWRITE mode.
// NOTE(review): as in fits(), the original computed a trace message (and the
// named-resource lookup feeding it) in the CommonErrorException handler but
// never used it; that dead code was removed — the handler still answers false.
private static boolean verboseFits(SystemEnvironment sysEnv, HashMap scopeFp, HashMap smeFp, SDMSSubmittedEntity sme, boolean checkCondition, SDMSScope evalScope)
throws SDMSException
{
	Iterator i = smeFp.keySet().iterator();
	while(i.hasNext()) {
		Long L = (Long) i.next();
		if(!scopeFp.containsKey(L))
			return false;
		SDMSResource r = SDMSResourceTable.getObject(sysEnv, (Long) scopeFp.get(L));
		Integer sAmount = r.getRequestableAmount(sysEnv);
		// A footprint entry is either a resource requirement or an environment entry.
		SDMSResourceRequirement rr;
		SDMSEnvironment e;
		SDMSProxy p = (SDMSProxy) smeFp.get(L);
		if (p instanceof SDMSResourceRequirement) {
			rr = (SDMSResourceRequirement) p;
			e = null;
		} else {
			e = (SDMSEnvironment) p;
			rr = null;
		}
		if(checkCondition) {
			String condition = (rr == null ? e.getCondition(sysEnv) : rr.getCondition(sysEnv));
			if (condition != null) {
				final BoolExpr be = new BoolExpr(condition);
				try {
					if (! be.checkCondition(sysEnv, r, sme, null, null, evalScope)) return false;
				} catch (CommonErrorException cee) {
					// A condition that cannot be evaluated is treated as "does not fit".
					return false;
				}
			}
		}
		// Environment entries request no amount; requirements request rr's amount.
		Integer jAmount;
		if(rr == null) jAmount = new Integer(0);
		else jAmount = rr.getAmount(sysEnv);
		if(sAmount == null)
			continue;
		if(jAmount.compareTo(sAmount) > 0)
			return false;
	}
	return true;
}
private boolean checkStaticResources(SystemEnvironment sysEnv, HashMap scopeFp, HashMap smeFp)
throws SDMSException
{
	// Verifies that the scope provides every named resource of the job
	// footprint and that all STATIC resources among them are online.
	for (Iterator it = smeFp.keySet().iterator(); it.hasNext(); ) {
		final Long nrId = (Long) it.next();
		if (!scopeFp.containsKey(nrId))
			return false;
		final SDMSResource res = SDMSResourceTable.getObjectForUpdate(sysEnv, (Long) scopeFp.get(nrId));
		final SDMSNamedResource namedRes = SDMSNamedResourceTable.getObject(sysEnv, res.getNrId(sysEnv));
		// Only STATIC resources must be online; other usages are not checked here.
		if (namedRes.getUsage(sysEnv).intValue() == SDMSNamedResource.STATIC &&
		    !res.getIsOnline(sysEnv).booleanValue())
			return false;
	}
	return true;
}
// Tries to turn the synchronizing resource REQUESTs of a job into
// RESERVATIONs, all-or-nothing: the work is done in a subtransaction that is
// rolled back (returning false) as soon as one resource cannot be reserved.
// Sticky requirements additionally maintain a master request/reservation on
// behalf of the sticky parent (negative sme id key).
// sfp: scope footprint (named resource id -> resource id).
// i:   iterator over the job footprint entries (requirements/environments).
// resourceChain: optional deadlock-avoidance bookkeeping; when present,
// failed reservations are recorded there instead of aborting immediately.
private boolean reserveSyncResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, long actVersion, HashMap sfp, Locklist resourceChain, Iterator i)
throws SDMSException
{
SDMSResourceRequirement rr;
SDMSProxy proxy;
SDMSResource r;
Long smeId = sme.getId(sysEnv);
Long nrId;
Long rId;
Long stickyParent;
Long nStickyParent;
String rrStickyName;
SDMSResourceAllocation ra = null;
SDMSResourceAllocation mra = null;
boolean isSticky;
boolean allocSucceeded = true;
int waitAmount;
Lockmode waitLock;
Reservator rsrv = null;
sysEnv.tx.beginSubTransaction(sysEnv);
try {
Vector srv = new Vector();
while(i.hasNext()) {
proxy = (SDMSProxy) i.next();
// Environment entries carry no reservation; only requirements matter here.
if (!(proxy instanceof SDMSResourceRequirement)) continue;
rr = (SDMSResourceRequirement) proxy;
nrId = rr.getNrId(sysEnv);
SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYNCHRONIZING) continue;
r = SDMSResourceTable.getObject(sysEnv, (Long) sfp.get(nrId));
rId = r.getId(sysEnv);
try {
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv, new SDMSKey(smeId, rId, rr.getStickyName(sysEnv)));
} catch (NotFoundException nfe) {
// No allocation on this concrete resource; a key on the named resource
// means the request is handled elsewhere, otherwise it is an error.
if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, rr.getStickyName(sysEnv))))
continue;
doTrace(cEnv, ": Job " + smeId + " needs a resource " + nrId + "/" + rId +
" which is neither requested/reserved/allocated nor ignored", SEVERITY_ERROR);
continue;
}
int allocType = ra.getAllocationType(sysEnv).intValue();
// Already ignored, reserved or allocated: nothing to do for this resource.
if(allocType == SDMSResourceAllocation.IGNORE) continue;
if(allocType == SDMSResourceAllocation.RESERVATION) continue;
if(allocType == SDMSResourceAllocation.ALLOCATION) continue;
if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, rr.getStickyName(sysEnv))))
continue;
if(resourceChain != null) {
rsrv = resourceChain.get(rId);
}
// Base the reservation attempt on what is already waiting on this resource.
if(rsrv == null) rsrv = new Reservator(rId, smeId);
else rsrv = new Reservator(rId, smeId, rsrv.amount, rsrv.lock.getLockmode());
isSticky = rr.getIsSticky(sysEnv).booleanValue();
if(isSticky) {
// Sticky requests are accounted against a master allocation owned by the
// sticky parent (stored under the negated parent id).
stickyParent = ra.getStickyParent(sysEnv);
nStickyParent = new Long(- stickyParent.longValue());
rrStickyName = rr.getStickyName(sysEnv);
try {
mra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv,
new SDMSKey(nStickyParent, rId, rrStickyName));
} catch (NotFoundException nfe) {
mra = createUpgradeMasterRequest(sysEnv, sme, rr, r, actVersion);
}
MasterReservationInfo mri = checkMasterReservation(sysEnv, sme, rr, stickyParent, r, rsrv);
if(mri.mustAllocate && (resourceChain != null)) {
resourceChain.set(new Reservator(rId, nStickyParent, mri.amount, mri.lockmode));
srv.add(mra);
}
if(!mri.canAllocate) {
if(resourceChain != null) {
allocSucceeded = false;
continue;
}
throw new SDMSEscape();
}
if(mri.mustAllocate) {
mra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.MASTER_RESERVATION));
}
}
waitAmount = rsrv.amount;
waitLock = rsrv.lock;
int reason = r.checkAllocate(sysEnv, rr, sme, ra, waitAmount, waitLock);
if(resourceChain != null)
resourceChain.set(new Reservator(rId, smeId, rr.getAmount(sysEnv).intValue(), rr.getLockmode(sysEnv).intValue()));
if(reason != SDMSResource.REASON_AVAILABLE) {
if(resourceChain == null) throw new SDMSEscape();
// State/expire/offline problems cannot be fixed by waiting; give up entirely.
if((reason & (SDMSResource.REASON_STATE|SDMSResource.REASON_EXPIRE|SDMSResource.REASON_OFFLINE)) != 0) {
resourceChain.removeSme(smeId);
throw new SDMSEscape();
}
allocSucceeded = false;
continue;
}
ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.RESERVATION));
if(isSticky) {
// Deduct this job's share from the master allocation.
int mAmount = mra.getAmount(sysEnv).intValue();
int raAmount = ra.getAmount(sysEnv).intValue();
mra.setAmount(sysEnv, new Integer(mAmount - raAmount));
}
}
if(!allocSucceeded) throw new SDMSEscape();
if(resourceChain != null) {
// Success: this job no longer waits; clear its chain entries.
resourceChain.removeSme(smeId);
for(int j = 0; j < srv.size(); j++) {
mra = (SDMSResourceAllocation) srv.get(j);
resourceChain.remove(mra.getRId(sysEnv), mra.getSmeId(sysEnv));
}
}
} catch(SDMSEscape e) {
sysEnv.tx.rollbackSubTransaction(sysEnv);
return false;
}
sysEnv.tx.commitSubTransaction(sysEnv);
return true;
}
private boolean reserveFp(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, Locklist resourceChain, HashMap lf_fp, int type)
throws SDMSException
{
	// Reserves folder/local resources described by lf_fp. Each value is a
	// Vector of [requirement, resource]; these pairs are split into a
	// footprint map (named resource id -> resource id) and a requirement
	// list, which are handed to the system or synchronizing reservation
	// routine, depending on 'type'.
	final HashMap footprint = new HashMap();
	final Vector requirements = new Vector();
	for (Iterator it = lf_fp.values().iterator(); it.hasNext(); ) {
		final Vector pair = (Vector) it.next();
		final SDMSResource res = (SDMSResource) pair.get(1);
		footprint.put(res.getNrId(sysEnv), res.getId(sysEnv));
		requirements.add(pair.get(0));
	}
	if (requirements.isEmpty())
		return true;	// nothing to reserve
	if (type == SDMSNamedResource.SYSTEM)
		return reserveSysResources(sysEnv, sme, footprint, resourceChain, requirements.iterator());
	return reserveSyncResources(sysEnv, sme, se, sme.getSeVersion(sysEnv).longValue(), footprint, resourceChain, requirements.iterator());
}
// Creates the MASTER_REQUEST allocation for a sticky requirement when none
// exists yet. It scans all not-yet-final jobs of the same master run that
// share the sticky requirement, counting them (refcount), combining their
// lock modes and taking the maximum requested amount; the resulting master
// request is stored under the negated master id.
// NOTE(review): 'lockmode' is seeded with Lockmode.N and combined via
// bitwise AND — confirm Lockmode.N is the neutral element for that; the
// locals 'seId' (only written) and 'factor' are unused remnants.
private SDMSResourceAllocation createUpgradeMasterRequest(SystemEnvironment sysEnv, SDMSSubmittedEntity sme,
SDMSResourceRequirement rr, SDMSResource r, long actVersion)
throws SDMSException
{
SDMSSubmittedEntity tsme;
SDMSResourceRequirement trr;
final Long nrId = rr.getNrId(sysEnv);
final Long rId = r.getId(sysEnv);
Long seId = null;
SDMSResourceAllocation ra;
float factor = 1;
Vector v;
int lockmode = Lockmode.N;
int amount = 0;
int refcount = 0;
v = SDMSSubmittedEntityTable.idx_masterId.getVectorForUpdate(sysEnv, sme.getMasterId(sysEnv));
for(int i = 0; i < v.size(); i++) {
tsme = (SDMSSubmittedEntity) v.get(i);
int state = tsme.getState(sysEnv).intValue();
// Only jobs that can still claim the sticky resource participate.
if(state != SDMSSubmittedEntity.SUBMITTED &&
state != SDMSSubmittedEntity.DEPENDENCY_WAIT &&
state != SDMSSubmittedEntity.SYNCHRONIZE_WAIT) continue;
if(tsme.getJobIsFinal(sysEnv).booleanValue()) continue;
seId = tsme.getSeId(sysEnv);
try {
trr = SDMSResourceRequirementTable.idx_seId_nrId_getUnique(sysEnv, new SDMSKey(seId, nrId), actVersion);
} catch (NotFoundException nfe) {
continue;
}
if(! trr.getIsSticky(sysEnv).booleanValue())
continue;
try {
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv, new SDMSKey(tsme.getId(sysEnv), rId, null));
// Jobs that explicitly ignore the resource do not count.
if(ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.IGNORE) continue;
} catch (NotFoundException nfe) { }
refcount++;
lockmode &= trr.getLockmode(sysEnv).intValue();
int tmp = trr.getAmount(sysEnv).intValue();
if(tmp > amount) amount = tmp;
}
// Master request is keyed by the negated master id (see reserveSyncResources).
ra = SDMSResourceAllocationTable.table.create(sysEnv,
rId, new Long(- sme.getMasterId(sysEnv)), nrId,
new Integer(amount),
new Integer(amount),
rr.getKeepMode(sysEnv),
Boolean.TRUE,
null,
sme.getMasterId(sysEnv),
new Integer(SDMSResourceAllocation.MASTER_REQUEST),
null,
new Integer(lockmode),
new Integer(refcount));
return ra;
}
public MasterReservationInfo checkMasterReservation(SystemEnvironment sysEnv, SDMSSubmittedEntity sme,
		SDMSResourceRequirement rr, Long stickyParent, SDMSResource r)
throws SDMSException
{
	// Convenience overload: delegates with a fresh, empty Reservator for
	// the given resource/job pair (no amount or lock already waiting).
	final Reservator emptyReservation = new Reservator(r.getId(sysEnv), sme.getId(sysEnv));
	return checkMasterReservation(sysEnv, sme, rr, stickyParent, r, emptyReservation);
}
// Decides whether the master allocation behind a sticky requirement can and
// must be promoted to a MASTER_RESERVATION.
// Returns a MasterReservationInfo where
//  - canAllocate  : the resource can satisfy amount and lock mode,
//  - mustAllocate : the master allocation is still a request and needs
//                   promotion (false if already a MASTER_RESERVATION or if
//                   no master allocation exists at all).
// rsrv carries amounts/locks already waiting on the resource and is taken
// into account in the amount and lock mode checks.
// NOTE(review): on the mustAllocate path mri.lockmode is overwritten with
// the local 'lockmode', which is never changed from Lockmode.N, and
// 'factor' is constant 1 — confirm this is intended.
public MasterReservationInfo checkMasterReservation(SystemEnvironment sysEnv, SDMSSubmittedEntity sme,
SDMSResourceRequirement rr, Long stickyParent, SDMSResource r, Reservator rsrv)
throws SDMSException
{
if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
SDMSSubmittedEntity tsme;
SDMSResourceRequirement trr;
final Long nrId = rr.getNrId(sysEnv);
final Long rId = r.getId(sysEnv);
final MasterReservationInfo mri = new MasterReservationInfo();
SDMSResourceAllocation ra = null;
float factor = 1;
String rrStickyName = rr.getStickyName(sysEnv);
Long nStickyParent = new Long(- stickyParent.longValue());
mri.stickyName = rrStickyName;
mri.stickyParent = stickyParent;
Vector v;
int lockmode = Lockmode.N;
int amount = 0;
try {
// Master allocation is keyed by the negated sticky parent id.
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUnique(sysEnv, new SDMSKey(nStickyParent, rId, rrStickyName));
mri.amount = ra.getAmount(sysEnv).intValue();
mri.lockmode = ra.getLockmode(sysEnv).intValue();
} catch (NotFoundException nfe) {
// No master allocation: nothing to promote, nothing blocks allocation.
return mri;
}
if (ra.getAllocationType(sysEnv).intValue() == SDMSResourceAllocation.MASTER_RESERVATION) {
// Already promoted earlier; the reservation simply remains valid.
mri.mustAllocate = false;
mri.canAllocate = true;
return mri;
}
int cAmount = (int) Math.ceil(mri.amount * factor);
if(!r.checkAmount(sysEnv, cAmount, mri.amount, rsrv.amount)) {
mri.mustAllocate = false;
mri.canAllocate = false;
return mri;
}
if(!r.syncCheckLockmode(sysEnv, mri.lockmode, rsrv.lock)) {
mri.mustAllocate = false;
mri.canAllocate = false;
return mri;
}
mri.mustAllocate = true;
mri.canAllocate = true;
mri.amount = cAmount;
mri.lockmode = lockmode;
return mri;
}
// Tries to turn the SYSTEM resource REQUESTs of a job into RESERVATIONs,
// all-or-nothing within a subtransaction (rolled back, returning false, as
// soon as one resource cannot be reserved).
// sfp: scope footprint (named resource id -> resource id).
// it:  iterator over the job footprint entries (requirements/environments).
// resourceChain: optional deadlock-avoidance bookkeeping; failed attempts
// (except offline resources) are recorded there instead of aborting.
private boolean reserveSysResources(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, HashMap sfp, Locklist resourceChain, Iterator it)
throws SDMSException
{
SDMSResourceRequirement rr;
SDMSProxy proxy;
Long smeId = sme.getId(sysEnv);
Long rId;
Long nrId;
Reservator rsrv = null;
boolean allocSucceeded = true;
sysEnv.tx.beginSubTransaction(sysEnv);
try {
while(it.hasNext()) {
proxy = (SDMSProxy) it.next();
// Environment entries carry no reservation; only requirements matter here.
if (!(proxy instanceof SDMSResourceRequirement)) continue;
rr = (SDMSResourceRequirement) proxy;
nrId = rr.getNrId(sysEnv);
SDMSNamedResource nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
if(nr.getUsage(sysEnv).intValue() != SDMSNamedResource.SYSTEM) continue;
SDMSResource r = SDMSResourceTable.getObjectForUpdate(sysEnv, (Long) sfp.get(nrId));
rId = r.getId(sysEnv);
SDMSResourceAllocation ra = null;
try {
ra = SDMSResourceAllocationTable.idx_smeId_rId_stickyName_getUniqueForUpdate(sysEnv, new SDMSKey(smeId, rId, null));
} catch (NotFoundException nfe) {
// No allocation on this concrete resource; a key on the named resource
// means the request is handled elsewhere, otherwise it is an error.
if(SDMSResourceAllocationTable.idx_smeId_rId_stickyName.containsKey(sysEnv, new SDMSKey(smeId, nrId, null))) continue;
doTrace(cEnv, ": Job " + smeId + " needs a resource " + nrId + "/" + rId +
" which is neither requested/reserved/allocated nor ignored", SEVERITY_ERROR);
continue;
}
int allocType = ra.getAllocationType(sysEnv).intValue();
// Already ignored, reserved or allocated: nothing to do for this resource.
if(allocType == SDMSResourceAllocation.IGNORE) continue;
if(allocType == SDMSResourceAllocation.RESERVATION) continue;
if(allocType == SDMSResourceAllocation.ALLOCATION) continue;
if(resourceChain != null) {
rsrv = resourceChain.get(rId);
}
if(rsrv == null) rsrv = new Reservator(rId, smeId);
int waitAmount = rsrv.amount;
Lockmode waitLock = rsrv.lock;
int reason = r.checkAllocate(sysEnv, rr, sme, ra, waitAmount, waitLock);
if(reason != SDMSResource.REASON_AVAILABLE) {
// Offline resources cannot be waited for; anything else is queued in the chain.
if(resourceChain != null && reason != SDMSResource.REASON_OFFLINE) {
resourceChain.set(new Reservator(rId, smeId, rr.getAmount(sysEnv).intValue(), Lockmode.N));
allocSucceeded = false;
continue;
}
throw new SDMSEscape();
}
ra.setAllocationType(sysEnv, new Integer(SDMSResourceAllocation.RESERVATION));
}
if(!allocSucceeded) throw new SDMSEscape();
if(resourceChain != null) {
// Success: this job no longer waits; clear its chain entries.
resourceChain.removeSme(smeId);
}
} catch(SDMSEscape e) {
sysEnv.tx.rollbackSubTransaction(sysEnv);
return false;
}
sysEnv.tx.commitSubTransaction(sysEnv);
return true;
}
private void merge(HashMap target, HashMap source)
{
	// Copies entries from source into target without overwriting keys that
	// are already present (existing target entries take precedence).
	for (Iterator it = source.keySet().iterator(); it.hasNext(); ) {
		final Long key = (Long) it.next();
		if (!target.containsKey(key))
			target.put(key, source.get(key));
	}
}
public HashMap getScopeFootprint(SystemEnvironment sysEnv, SDMSScope s)
throws SDMSException
{
	// Builds the footprint of a scope: a map from named resource id to the
	// resource id visible in this scope. Resources defined in the scope
	// itself shadow equally named resources inherited from parent scopes
	// (merge() only adds keys that are still missing).
	if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	final HashMap footprint = new HashMap();
	final Long scopeId = s.getId(sysEnv);
	final Vector resources = SDMSResourceTable.idx_scopeId.getVector(sysEnv, scopeId);
	for (int idx = 0; idx < resources.size(); idx++) {
		final SDMSResource res = (SDMSResource) resources.get(idx);
		footprint.put(res.getNrId(sysEnv), res.getId(sysEnv));
	}
	final Long parentId = s.getParentId(sysEnv);
	if (parentId != null) {
		// Recurse up the scope tree and merge inherited resources underneath.
		final SDMSScope parent = SDMSScopeTable.getObject(sysEnv, parentId);
		merge(footprint, getScopeFootprint(sysEnv, parent));
	}
	return footprint;
}
// Returns the footprint of a job as a Vector of three HashMaps
// [FP_SCOPE, FP_FOLDER, FP_LOCAL], using the cached npJobFootprint row when
// one exists. Otherwise the raw footprint (named resource id -> requirement
// or environment entry) is assembled from the job's environment, the
// environments of all enclosing folders, the footprint requirements and the
// job's own requirements, and then split by splitSmeFootprint() (which also
// caches the result in READWRITE mode).
public static Vector getJobFootprint(SystemEnvironment sysEnv, SDMSSubmittedEntity sme)
throws SDMSException
{
if (sysEnv.maxWriter > 1 && sysEnv.tx.mode == SDMSTransaction.READWRITE)
LockingSystem.lock(sysEnv, sysEnv.sched, ObjectLock.EXCLUSIVE);
SDMSnpJobFootprint jfp;
SDMSSchedulingEntity se;
SDMSEnvironment e;
SDMSResourceRequirement rr;
HashMap fp;
Long smeId = sme.getId(sysEnv);
try {
// Fast path: footprint already cached for this job.
jfp = SDMSnpJobFootprintTable.idx_smeId_getUniqueForUpdate(sysEnv, smeId);
Vector result = new Vector();
result.add(jfp.getFpScope(sysEnv)) ;
result.add(jfp.getFpFolder(sysEnv)) ;
result.add(jfp.getFpLocal(sysEnv)) ;
return result;
} catch(NotFoundException nfe) {
}
fp = new HashMap();
Long seId = sme.getSeId(sysEnv);
long version = sme.getSeVersion(sysEnv).longValue();
se = SDMSSchedulingEntityTable.getObject(sysEnv, seId, version);
// Environment entries of the job definition itself.
Vector v = SDMSEnvironmentTable.idx_neId.getVector(sysEnv, se.getNeId(sysEnv), version);
for(int i = 0; i < v.size(); i++) {
e = (SDMSEnvironment) v.get(i);
fp.put(e.getNrId(sysEnv), e);
}
// Environment entries of all folders up to the root.
Long parentId = se.getFolderId(sysEnv);
do {
SDMSFolder f = SDMSFolderTable.getObject(sysEnv, parentId, version);
Long id = f.getEnvId(sysEnv);
if(id != null) {
v = SDMSEnvironmentTable.idx_neId.getVector(sysEnv, id, version);
for(int i = 0; i < v.size(); i++) {
e = (SDMSEnvironment) v.get(i);
fp.put(e.getNrId(sysEnv), e);
}
}
parentId = f.getParentId(sysEnv);
} while(parentId != null);
// Footprint requirements, then the job's own requirements (which override).
v = SDMSResourceRequirementTable.idx_seId.getVector(sysEnv, se.getFpId(sysEnv), version);
for(int i = 0; i < v.size(); i++) {
rr = (SDMSResourceRequirement) v.get(i);
fp.put(rr.getNrId(sysEnv), rr);
}
v = SDMSResourceRequirementTable.idx_seId.getVector(sysEnv, seId, version);
for(int i = 0; i < v.size(); i++) {
rr = (SDMSResourceRequirement) v.get(i);
fp.put(rr.getNrId(sysEnv), rr);
}
return SystemEnvironment.sched.splitSmeFootprint(sysEnv, sme, se, fp, smeId);
}
// Splits the raw job footprint fp (named resource id -> requirement or
// environment entry) into three maps, returned as [fpScope, fpFolder,
// fpLocal]:
//  - fpScope : entries to be satisfied by the jobserver scope (environment
//              entries and STATIC resources, plus requirements for which no
//              folder or job-local resource exists),
//  - fpFolder: requirements matched by a resource defined on a folder,
//  - fpLocal : requirements matched by a resource defined on a (parent) job.
// Lookups walk the folder hierarchy and the job's parent chain; positive and
// negative results are cached per transaction in sysEnv.tx.rscCache.
// In READWRITE mode the split is persisted in the npJobFootprint table.
private Vector splitSmeFootprint(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, SDMSSchedulingEntity se, HashMap fp, Long smeId)
throws SDMSException
{
SDMSResourceRequirement rr;
SDMSNamedResource nr;
SDMSResource r;
SDMSResource bestFit;
Long bestFitSmeId;
Long bestFitFId;
Long nrId;
HashMap fpFolder = new HashMap();
HashMap fpLocal = new HashMap();
HashMap fpScope = new HashMap();
Vector result = new Vector();
SDMSKey k = null;
Vector kv = null;
long actVersion = sme.getSeVersion(sysEnv).longValue();
if (sysEnv.tx.rscCache == null)
sysEnv.tx.rscCache = new HashMap();
HashMap myRscCache = sysEnv.tx.rscCache;
Iterator fpi = fp.keySet().iterator();
while(fpi.hasNext()) {
bestFit = null;
bestFitSmeId = null;
bestFitFId = null;
nrId = (Long) fpi.next();
SDMSProxy proxy = (SDMSProxy) fp.get(nrId);
if (proxy instanceof SDMSResourceRequirement) {
rr = (SDMSResourceRequirement) proxy;
} else {
rr = null;
}
if(rr == null) {
// Environment entry: always resolved at scope level.
proxy.fix();
fpScope.put(nrId, proxy);
continue;
}
nr = SDMSNamedResourceTable.getObject(sysEnv, nrId);
if(nr.getUsage(sysEnv).intValue() == SDMSNamedResource.STATIC) {
// Static resources are provided by the scope as well.
rr.fix();
fpScope.put(nrId, rr);
continue;
}
// Search for a folder resource: walk the folder chain of the job's
// definition, then retry with each parent job's definition.
SDMSSchedulingEntity myse = se;
SDMSSubmittedEntity mysme = sme;
boolean hit;
while(true) {
hit = false;
Long fId = myse.getFolderId(sysEnv);
long myActVersion = mysme.getSeVersion(sysEnv).longValue();
// kv collects the keys of misses so they can share the final cache entry.
kv = new Vector();
while(fId != null) {
k = new SDMSKey(nrId, fId);
if (myRscCache.containsKey(k)) {
doTrace(cEnv, "Cache hit for folder " + k, SEVERITY_DEBUG);
hit = true;
Vector e = (Vector) myRscCache.get(k);
if (e == null) {
// Cached negative result: no folder resource on this chain.
bestFitFId = null;
} else {
bestFit = (SDMSResource) e.get(0);
bestFitFId = (Long) e.get(1);
}
break;
} else {
doTrace(cEnv, "Cache miss for folder " + k, SEVERITY_DEBUG);
try {
r = SDMSResourceTable.idx_nrId_scopeId_getUnique(sysEnv, k);
bestFit = r;
bestFitFId = fId;
Vector e = new Vector();
r.fix();
e.add(r);
e.add(fId);
myRscCache.put(k, e);
// Also cache the result for all folders visited below the hit.
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), e);
}
break;
} catch (NotFoundException nfe) {
fId = SDMSFolderTable.getObject(sysEnv, fId, myActVersion).getParentId(sysEnv);
kv.add(k);
}
}
}
if(bestFitFId != null) break;
doTrace(cEnv, "No folder Resource found for " + k, SEVERITY_DEBUG);
if (!hit) {
// Cache the negative result for the whole visited folder chain.
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), null);
}
}
Long pSmeId = mysme.getParentId(sysEnv);
if(pSmeId == null) break;
mysme = SDMSSubmittedEntityTable.getObject(sysEnv, pSmeId);
myse = SDMSSchedulingEntityTable.getObject(sysEnv, mysme.getSeId(sysEnv), myActVersion);
}
// Search for a job-local resource: walk the job's parent chain.
Long pSmeId = smeId;
kv = new Vector();
hit = false;
while(pSmeId != null) {
k = new SDMSKey(nrId, pSmeId);
if (myRscCache.containsKey(k)) {
doTrace(cEnv, "Cache hit for sme " + k, SEVERITY_DEBUG);
hit = true;
Vector e = (Vector) myRscCache.get(k);
if (e == null) {
bestFitSmeId = null;
} else {
bestFit = (SDMSResource) e.get(0);
bestFitSmeId = (Long) e.get(1);
}
break;
} else {
try {
doTrace(cEnv, "Cache miss for sme " + k, SEVERITY_DEBUG);
r = SDMSResourceTable.idx_nrId_scopeId_getUnique(sysEnv, k);
bestFit = r;
bestFitSmeId = pSmeId;
Vector e = new Vector();
r.fix();
e.add(r);
e.add(pSmeId);
myRscCache.put(k, e);
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), e);
}
break;
} catch (NotFoundException nfe) {
pSmeId = SDMSSubmittedEntityTable.getObject(sysEnv, pSmeId).getParentId(sysEnv);
kv.add(k);
}
}
}
if(bestFitSmeId == null && !hit) {
doTrace(cEnv, "No sme Resource found for " + k, SEVERITY_DEBUG);
for (int kvi = 0; kvi < kv.size(); ++kvi) {
myRscCache.put(kv.get(kvi), null);
}
}
if(bestFitSmeId != null || bestFitFId != null) {
// Amount sanity check: a folder/local resource that cannot ever
// satisfy the request puts the job into error state.
Integer requestableAmount = bestFit.getRequestableAmount(sysEnv);
Integer requestedAmount = rr.getAmount(sysEnv);
if(requestableAmount != null) {
if(requestableAmount.compareTo(requestedAmount) < 0 && sysEnv.tx.mode == SDMSTransaction.READWRITE) {
sme.setToError(sysEnv, "Job cannot run because of resource shortage on resource " + nr.pathString(sysEnv));
}
}
Vector v = new Vector();
rr.fix();
v.add(rr);
bestFit.fix();
v.add(bestFit);
// Job-local resources take precedence over folder resources.
if(bestFitSmeId != null) {
fpLocal.put(nrId, v);
} else {
fpFolder.put(nrId, v);
}
} else {
// Neither folder nor job-local: must be satisfied by the scope.
rr.fix();
fpScope.put(nrId, rr);
}
}
if(sysEnv.tx.mode == SDMSTransaction.READWRITE) {
SDMSnpJobFootprintTable.table.create(sysEnv, smeId, fpScope, fpFolder, fpLocal);
}
result.add(fpScope) ;
result.add(fpFolder) ;
result.add(fpLocal) ;
return result;
}
// Recomputes the resource footprint of a server scope. For non-server scopes
// the call recurses into all child scopes instead.
void recalc_sfp(SystemEnvironment sysEnv, Long scopeId, SDMSScope s)
	throws SDMSException
{
	doTrace(cEnv, "Calculating footprint for scope " + s.pathString(sysEnv), SEVERITY_DEBUG);
	if (s.getType(sysEnv).intValue() != SDMSScope.SERVER) {
		// Not a server: descend into every child scope.
		Vector children = SDMSScopeTable.idx_parentId.getVector(sysEnv, scopeId);
		for (int idx = 0; idx < children.size(); ++idx) {
			SDMSScope child = (SDMSScope) children.get(idx);
			recalc_sfp(sysEnv, child.getId(sysEnv), child);
		}
		return;
	}
	// Server scope: rebuild and store its footprint.
	HashMap fp = getScopeFootprint(sysEnv, s);
	doTrace(cEnv, "footprint = " + fp.toString(), SEVERITY_DEBUG);
	SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, scopeId).setFp(sysEnv, fp);
}
// Drops all cached server/resource footprints; buildEnvironment() recreates them.
void destroyEnvironment(SystemEnvironment sysEnv)
	throws SDMSException
{
	// NOTE(review): unlocked table clear -- the visible caller (notifyChange,
	// MOVE/COPY case) takes the exclusive scheduler lock first; confirm all call sites.
	SDMSnpSrvrSRFootprintTable.table.clearTableUnlocked(sysEnv);
}
// Rebuilds the per-server footprint table and, unless jsOnly is set, re-requests
// scheduling for JOB entities that were left waiting with a recorded old state
// (e.g. after a server restart). Finally flags a scheduling pass.
void buildEnvironment(SystemEnvironment sysEnv, boolean jsOnly)
	throws SDMSException
{
	// (Re)create one footprint row per server scope.
	Vector v = SDMSScopeTable.idx_type.getVector(sysEnv, new Integer(SDMSScope.SERVER));
	for (int j = 0; j < v.size(); j++) {
		SDMSScope s = (SDMSScope) v.get(j);
		SDMSnpSrvrSRFootprintTable.table.create(sysEnv, s.getId(sysEnv), null, getScopeFootprint(sysEnv, s));
	}
	if (!jsOnly) {
		// The two waiting states were scanned with identical copy-pasted loops
		// in the original; the scan is now factored into a helper.
		Vector rl = new Vector();
		collectResubmittableJobs(sysEnv, SDMSSubmittedEntity.DEPENDENCY_WAIT, rl);
		collectResubmittableJobs(sysEnv, SDMSSubmittedEntity.SYNCHRONIZE_WAIT, rl);
		addToRequestList(rl);
	}
	needSched = true;
}
// Appends to rl the ids of all JOB-type submitted entities in the given state
// that have an old state recorded.
private void collectResubmittableJobs(SystemEnvironment sysEnv, int state, Vector rl)
	throws SDMSException
{
	Vector v = SDMSSubmittedEntityTable.idx_state.getVector(sysEnv, new Integer(state));
	for (int i = 0; i < v.size(); ++i) {
		SDMSSubmittedEntity sme = (SDMSSubmittedEntity) v.get(i);
		SDMSSchedulingEntity se = SDMSSchedulingEntityTable.getObject(sysEnv, sme.getSeId(sysEnv), sme.getSeVersion(sysEnv));
		if (se.getType(sysEnv).intValue() != SDMSSchedulingEntity.JOB) continue;
		if (sme.getOldState(sysEnv) != null)
			rl.add(sme.getId(sysEnv));
	}
}
// Flags the scheduler to run another scheduling pass and wakes the scheduler thread.
public void requestSchedule()
{
	needSched = true;
	this.wakeUp();
}
// Reacts to a change of a (scope-bound) resource: refreshes the owning scope's
// footprint and flags scheduling; structural changes also flag a full reschedule.
public void notifyChange(SystemEnvironment sysEnv, SDMSResource r, Long scopeId, int change)
	throws SDMSException
{
	if (sysEnv.maxWriter > 1)
		LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
	if (scopeId != null) {
		SDMSScope scope = null;
		try {
			scope = SDMSScopeTable.getObject(sysEnv, scopeId);
		} catch (NotFoundException nfe) {
			// Scope vanished; nothing to refresh.
		}
		if (scope != null)
			recalc_sfp(sysEnv, scopeId, scope);
	}
	switch(change) {
	case CREATE:
	case ALTER_REQAMOUNT:
	case OFFLINE_ONLINE:
	case DELETE:
		// These changes alter resource availability -> full reschedule.
		needReSched = true;
		break;
	case ALTER:
		break;
	default:
		throw new FatalException(new SDMSMessage(sysEnv, "03202252140", "Unknown change code $1", new Integer(change)));
	}
	needSched = true;
}
// Reacts to a change of a named resource. All recognized change codes merely
// trigger a scheduling pass; unknown codes are a fatal programming error.
public void notifyChange(SystemEnvironment sysEnv, SDMSNamedResource nr, int change)
	throws SDMSException
{
	if (change != CREATE && change != ALTER && change != DELETE)
		throw new FatalException(new SDMSMessage(sysEnv, "03203060018", "Unknown change code $1", new Integer(change)));
	needSched = true;
}
// Reacts to a lifecycle change of a scope (e.g. a jobserver) and keeps the
// cached server footprint table in sync. Any recognized change ends with
// needSched = true, i.e. at least a scheduling pass.
public void notifyChange(SystemEnvironment sysEnv, SDMSScope s, int change)
	throws SDMSException
{
	switch(change) {
	case CREATE:
		// Footprint table is shared state; serialize writers when more than one is allowed.
		if (sysEnv.maxWriter > 1)
			LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
		SDMSnpSrvrSRFootprintTable.table.create(sysEnv, s.getId(sysEnv), null, getScopeFootprint(sysEnv, s));
		break;
	case ALTER:
		break;
	case DELETE:
		if (sysEnv.maxWriter > 1)
			LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
		// Drop the footprint row of the deleted scope.
		SDMSnpSrvrSRFootprint f = SDMSnpSrvrSRFootprintTable.idx_sId_getUnique(sysEnv, s.getId(sysEnv));
		f.delete(sysEnv);
		needReSched = true;
		break;
	case REGISTER:
	case DEREGISTER:
		// (De)registration changes resource availability -> full reschedule.
		needReSched = true;
		break;
	case SUSPEND:
		break;
	case RESUME:
		break;
	case SHUTDOWN:
		needSched = true;
		break;
	case MOVE:
	case COPY:
		// Scope tree changed: rebuild all server footprints from scratch (jsOnly = true).
		if (sysEnv.maxWriter > 1)
			LockingSystem.lock(sysEnv, this, ObjectLock.EXCLUSIVE);
		destroyEnvironment(sysEnv);
		buildEnvironment(sysEnv, true);
		needSched = true;
		break;
	default:
		throw new FatalException(new SDMSMessage(sysEnv, "03202252142", "Unknown change code $1", new Integer(change)));
	}
	needSched = true;
}
// Reacts to a change of a submitted entity (job instance). A state change into
// a terminal/broken state additionally purges the job's runnable-queue entries.
public void notifyChange(SystemEnvironment sysEnv, SDMSSubmittedEntity sme, int change)
	throws SDMSException
{
	int size;
	Vector v;
	switch(change) {
	case SUSPEND:
		// Suspension alone does not require a scheduling pass.
		break;
	case STATECHANGE:
		int s = sme.getState(sysEnv).intValue();
		switch(s) {
		case SDMSSubmittedEntity.FINISHED:
		case SDMSSubmittedEntity.FINAL:
		case SDMSSubmittedEntity.ERROR:
		case SDMSSubmittedEntity.CANCELLED:
		case SDMSSubmittedEntity.BROKEN_ACTIVE:
		case SDMSSubmittedEntity.BROKEN_FINISHED:
			// Terminal or broken: the job can no longer run; drop any queued entries.
			v = SDMSRunnableQueueTable.idx_smeId.getVector(sysEnv, sme.getId(sysEnv));
			size = v.size();
			for (int i = 0; i < size; i ++) {
				SDMSRunnableQueue rq = (SDMSRunnableQueue)v.get(i);
				rq.delete(sysEnv);
			}
			break;
		}
		needSched = true;
		break;
	case FINISH:
	case PRIORITY:
	case RERUN:
	case IGNORE_RESOURCE:
	case SUBMIT:
	case RESUME:
		needSched = true;
		break;
	default:
		throw new FatalException(new SDMSMessage(sysEnv, "03202252317", "Unknown change code $1", new Integer(change)));
	}
}
// Flags a full reschedule for the next scheduler pass. Unlike requestSchedule()
// this does not wake the scheduler thread; the flag is honored on the next wakeup.
public void requestReschedule()
{
	needReSched = true;
}
}
// Scheduler work item: either triggers a protected scheduling pass (SCHEDULE)
// or rebuilds the scheduler environment from scratch (INITIALIZE).
class DoSchedule extends Node
{
	static final int SCHEDULE = 0;
	static final int INITIALIZE = 1;
	int action;
	// Default action is a plain scheduling pass. Chained to the main constructor
	// (the original duplicated the field assignments in both constructors).
	public DoSchedule()
	{
		this(SCHEDULE);
	}
	public DoSchedule(int a)
	{
		super();
		action = a;
		auditFlag = false;
	}
	public void go(SystemEnvironment sysEnv)
		throws SDMSException
	{
		switch(action) {
		case SCHEDULE:
			SystemEnvironment.sched.scheduleProtected(sysEnv);
			break;
		case INITIALIZE:
			SystemEnvironment.sched.buildEnvironment(sysEnv, false);
			break;
		}
	}
}
// Orders submitted entities by dynamic (aged) priority, then raw priority,
// then id, so the ordering is total and deterministic.
class prioComparator implements Comparator
{
	SystemEnvironment sysEnv;
	// Configured aging interval in minutes. NOTE(review): dynPrio() uses the
	// per-job aging amount instead; the original shadowed this field with a
	// local of the same name, so it is currently unused there -- confirm intent.
	long priorityDelay;
	long now;
	public prioComparator(SystemEnvironment e, long p)
	{
		sysEnv = e;
		priorityDelay = p;
		now = System.currentTimeMillis();
	}
	// Refreshes the timestamp used as "now" by dynPrio().
	public void setNow()
	{
		now = System.currentTimeMillis();
	}
	// Effective priority of a job: the static priority improved (numerically
	// lowered) by one step per elapsed aging interval since submission, bounded
	// below by max(global lower bound, job minimum priority).
	public int dynPrio(SDMSSubmittedEntity sme)
		throws SDMSException
	{
		int p = sme.getPriority(sysEnv).intValue();
		int lb = Math.max(SystemEnvironment.priorityLowerBound, sme.getMinPriority(sysEnv).intValue());
		if (p <= lb) return p;
		long t = now - sme.getSubmitTs(sysEnv).longValue();
		// Renamed from "priorityDelay": the local shadowed the field above.
		long agingDelay = sme.getAgingAmount(sysEnv).intValue();
		if (agingDelay != 0)
			t /= (agingDelay * 60000);
		else
			t = 0;
		if (t >= p) p = lb;
		else p = Math.max(p - (int) t, lb);
		return p;
	}
	public int compare(Object o1, Object o2)
	{
		SDMSSubmittedEntity sme1 = (SDMSSubmittedEntity) o1;
		SDMSSubmittedEntity sme2 = (SDMSSubmittedEntity) o2;
		try {
			int p1 = dynPrio(sme1);
			int p2 = dynPrio(sme2);
			if (p1 < p2) return -1;
			if (p1 > p2) return 1;
			int rp1 = sme1.getRawPriority(sysEnv).intValue();
			int rp2 = sme2.getRawPriority(sysEnv).intValue();
			if (rp1 < rp2) return -1;
			if (rp1 > rp2) return 1;
			long l1 = sme1.getId(sysEnv).longValue();
			long l2 = sme2.getId(sysEnv).longValue();
			if (l1 < l2) return -1;
			if (l1 > l2) return 1;
		} catch (SDMSException e) {
			// Comparator may not throw checked exceptions; wrap and rethrow.
			throw new RuntimeException("Error while comparing : " + e.toString());
		}
		return 0;
	}
}
// A single resource reservation: amount and lock mode held by job smeId on
// resource rId. seq is maintained by Locklist.
class Reservator
{
	public Long rId;
	public Long smeId;
	public int amount;
	public Lockmode lock;
	public int seq;
	// The telescoping constructors below were four copy-pasted bodies in the
	// original; they now chain to the canonical (r, s, a, Lockmode) form.
	public Reservator(Long r, Long s)
	{
		this(r, s, 0);
	}
	public Reservator(Long r, Long s, int a)
	{
		this(r, s, a, new Lockmode());
	}
	public Reservator(Long r, Long s, int a, Lockmode l)
	{
		rId = r;
		smeId = s;
		amount = a;
		lock = l;
		seq = 0;
	}
	public Reservator(Long r, Long s, int a, int l)
	{
		this(r, s, a, new Lockmode(l));
	}
	// Merges another lock mode into this reservation's lock:
	// N is neutral; two differing non-N modes escalate to exclusive (X).
	// Returns the resulting lock mode.
	public int addLock(Lockmode lm)
	{
		int l = lm.getLockmode();
		int ol = lock.getLockmode();
		if(l != Lockmode.N) {
			if(ol == Lockmode.N) lock.setLockmode(l);
			else {
				if(l != ol) lock.setLockmode(Lockmode.X);
			}
		}
		return lock.getLockmode();
	}
}
// Double-indexed reservation list: per resource (with a ZERO-keyed aggregate
// entry) and per job. The aggregate rebuild that was duplicated in removeSme()
// and remove() is factored into rebuildAggregate().
class Locklist
{
	// rId -> (smeId -> Reservator); key ZERO holds the per-resource aggregate.
	private HashMap lpr;
	// smeId -> (rId -> Reservator)
	private HashMap lpj;
	// Pseudo smeId under which the aggregate Reservator is stored.
	static private final Long ZERO = new Long(0);
	public Locklist()
	{
		lpr = new HashMap();
		lpj = new HashMap();
	}
	// Returns the reservation of job smeId on resource rId, or a fresh empty
	// Reservator if none is recorded (the fresh one is NOT registered).
	public Reservator get(Long rId, Long smeId)
	{
		HashMap h = (HashMap) lpr.get(rId);
		if(h == null) return new Reservator(rId, smeId);
		Reservator r = (Reservator) h.get(smeId);
		if(r == null) return new Reservator(rId, smeId);
		return r;
	}
	// Returns the aggregate reservation on resource rId.
	public Reservator get(Long rId)
	{
		return get(rId, ZERO);
	}
	// Registers reservation r in both indexes and folds it into the aggregate.
	public void set(Reservator r)
	{
		HashMap h = (HashMap) lpr.get(r.rId);
		if(h == null) {
			h = new HashMap();
			lpr.put(r.rId, h);
		}
		h.put(r.smeId, r);
		Reservator rt = (Reservator) h.get(ZERO);
		if(rt == null) {
			rt = new Reservator(r.rId, ZERO);
			h.put(ZERO, rt);
		}
		rt.amount += r.amount;
		rt.addLock(r.lock);
		rt.seq++;
		r.seq = rt.seq;
		h = (HashMap) lpj.get(r.smeId);
		if(h == null) {
			h = new HashMap();
			lpj.put(r.smeId, h);
		}
		h.put(r.rId, r);
	}
	// Removes all reservations held by job smeId, rebuilding every affected aggregate.
	public void removeSme(Long smeId)
	{
		HashMap h = (HashMap) lpj.get(smeId);
		if(h == null) return;
		Iterator i = h.keySet().iterator();
		while(i.hasNext()) {
			Long rId = (Long) i.next();
			HashMap rh = (HashMap) lpr.get(rId);
			rh.remove(smeId);
			rebuildAggregate(rId, rh);
		}
		lpj.remove(smeId);
	}
	// Removes the reservation of job smeId on resource rId, if any.
	public void remove(Long rId, Long smeId)
	{
		HashMap h = (HashMap) lpj.get(smeId);
		if(h == null) return;
		if(h.remove(rId) == null) return;
		HashMap rh = (HashMap) lpr.get(rId);
		rh.remove(smeId);
		rebuildAggregate(rId, rh);
	}
	// Recomputes the ZERO aggregate entry of a per-resource map from scratch.
	private void rebuildAggregate(Long rId, HashMap rh)
	{
		rh.remove(ZERO);
		Reservator zr = new Reservator(rId, ZERO);
		Iterator j = rh.values().iterator();
		while(j.hasNext()) {
			Reservator r = (Reservator) j.next();
			zr.amount += r.amount;
			zr.addLock(r.lock);
			zr.seq++;
		}
		rh.put(ZERO, zr);
	}
}
| enhanced expressions
| src/server/SchedulingThread.java | enhanced expressions |
|
Java | apache-2.0 | a2d33c8424916a2ab27d8a787f86f922b0c7ba85 | 0 | cdjackson/zigbee4java,tlaukkan/zigbee4java,cdealti/zigbee4java,cdjackson/zigbee4java,cdjackson/zigbee4java,tlaukkan/zigbee4java,tlaukkan/zigbee4java,Arvis-Home/zigbee4java,cdealti/zigbee4java,Arvis-Home/zigbee4java | /**
* Copyright 2013 Tommi S.E. Laukkanen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bubblecloud.zigbee;
import org.bubblecloud.zigbee.network.EndpointListener;
import org.bubblecloud.zigbee.network.ZigBeeEndpoint;
import org.bubblecloud.zigbee.network.discovery.ZigBeeDiscoveryManager;
import org.bubblecloud.zigbee.network.impl.NetworkStateSerializer;
import org.bubblecloud.zigbee.network.impl.ZigBeeNetwork;
import org.bubblecloud.zigbee.network.model.DiscoveryMode;
import org.bubblecloud.zigbee.network.model.DriverStatus;
import org.bubblecloud.zigbee.network.model.NetworkMode;
import org.bubblecloud.zigbee.network.packet.ZToolAddress16;
import org.bubblecloud.zigbee.network.packet.zdo.ZDO_MGMT_PERMIT_JOIN_REQ;
import org.bubblecloud.zigbee.network.packet.zdo.ZDO_MGMT_PERMIT_JOIN_RSP;
import org.bubblecloud.zigbee.api.DeviceListener;
import org.bubblecloud.zigbee.network.impl.ApplicationFrameworkLayer;
import org.bubblecloud.zigbee.api.*;
import org.bubblecloud.zigbee.api.device.generic.*;
import org.bubblecloud.zigbee.api.device.hvac.Pump;
import org.bubblecloud.zigbee.api.device.hvac.TemperatureSensor;
import org.bubblecloud.zigbee.api.device.lighting.*;
import org.bubblecloud.zigbee.api.device.security_safety.IASAncillaryControlEquipment;
import org.bubblecloud.zigbee.api.device.security_safety.IASControlAndIndicatingEquipment;
import org.bubblecloud.zigbee.api.device.security_safety.IAS_Warning;
import org.bubblecloud.zigbee.api.device.security_safety.IAS_Zone;
import org.bubblecloud.zigbee.api.device.impl.*;
import org.bubblecloud.zigbee.api.DeviceBase;
import org.bubblecloud.zigbee.network.port.ZigBeeNetworkManagerImpl;
import org.bubblecloud.zigbee.network.port.ZigBeePort;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* ZigBee Application Interface.
* @author <a href="mailto:[email protected]">Tommi S.E. Laukkanen</a>
* @author <a href="mailto:[email protected]">Chris Hatton</a>
*/
public class ZigBeeApi implements EndpointListener, DeviceListener {
/**
* The logger.
*/
private final static Logger LOGGER = LoggerFactory.getLogger(ZigBeeDiscoveryManager.class);
/**
* The ZigBee network manager.
*/
private final ZigBeeNetworkManagerImpl networkManager;
/**
* The ZigBee discovery manager.
*/
private final ZigBeeDiscoveryManager discoveryManager;
/**
* The ZigBee context.
*/
private ZigBeeApiContext context;
/**
* The zigbee network.
*/
private ZigBeeNetwork network;
/**
* Constructor to configure the port interface.
*
* @param port the ZigBee interface port (reference implementation provided by the zigbee4java-serialPort module)
* @param pan the pan
* @param channel the channel
* @param discoveryModes the discovery modes
* @param resetNetwork the flag indicating network reset on startup
*/
public ZigBeeApi(final ZigBeePort port, final int pan, final int channel,
final EnumSet<DiscoveryMode> discoveryModes, final boolean resetNetwork) {
networkManager = new ZigBeeNetworkManagerImpl(port,
NetworkMode.Coordinator, pan, channel, resetNetwork, 2500L);
discoveryManager = new ZigBeeDiscoveryManager(networkManager, discoveryModes);
}
/**
* Constructor to configure the port interface.
*
* @param port the ZigBee interface port (reference implementation provided by the zigbee4java-serialPort module)
* @param pan the pan
* @param channel the channel
* @param resetNetwork the flag indicating network reset on startup
*/
public ZigBeeApi(final ZigBeePort port, final int pan, final int channel,
final boolean resetNetwork, final EnumSet<DiscoveryMode> discoveryModes) {
networkManager = new ZigBeeNetworkManagerImpl(port, NetworkMode.Coordinator, pan, channel, resetNetwork, 2500L);
discoveryManager = new ZigBeeDiscoveryManager(networkManager, discoveryModes);
network = ApplicationFrameworkLayer.getAFLayer(networkManager).getZigBeeNetwork();
network.addEndpointListenerListener(this);
context = new ZigBeeApiContext();
final ClusterFactory clusterFactory = new ClusterFactoryImpl(context);
context.setClusterFactory(clusterFactory);
final Map<Class<?>, Class<?>> deviceIntefaceImplemetnationMap = new HashMap<Class<?>, Class<?>>();
deviceIntefaceImplemetnationMap.put(ColorDimmableLight.class, ColorDimmableLightDevice.class);
deviceIntefaceImplemetnationMap.put(DimmableLight.class, DimmableLightDevice.class);
deviceIntefaceImplemetnationMap.put(IAS_Zone.class, IAS_ZoneDevice.class);
deviceIntefaceImplemetnationMap.put(IASAncillaryControlEquipment.class, IASAncillaryControlEquipmentDevice.class);
deviceIntefaceImplemetnationMap.put(IASControlAndIndicatingEquipment.class, IASControlAndIndicatingEquipmentDevice.class);
deviceIntefaceImplemetnationMap.put(LevelControlSwitch.class, LevelControlSwitchDevice.class);
deviceIntefaceImplemetnationMap.put(LightSensor.class, LightSensorDevice.class);
deviceIntefaceImplemetnationMap.put(MainsPowerOutlet.class, MainsPowerOutletDevice.class);
deviceIntefaceImplemetnationMap.put(OccupancySensor.class, OccupancySensorDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffLight.class, OnOffLightDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffLightSwitch.class, OnOffLightSwitchDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffOutput.class, OnOffOutputDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffSwitch.class, OnOffSwitchDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffLight.class, OnOffLightDevice.class);
deviceIntefaceImplemetnationMap.put(Pump.class, PumpDevice.class);
deviceIntefaceImplemetnationMap.put(TemperatureSensor.class, TemperatureSensorDevice.class);
deviceIntefaceImplemetnationMap.put(IAS_Warning.class, IAS_Warning_Device.class);
deviceIntefaceImplemetnationMap.put(SimpleSensor.class, SimpleSensorDevice.class);
final Iterator<Map.Entry<Class<?>, Class<?>>> i = deviceIntefaceImplemetnationMap.entrySet().iterator();
while (i.hasNext()) {
Map.Entry<Class<?>, Class<?>> refining = i.next();
try {
context.getDeviceFactories().add(
new DeviceFactoryImpl(context, refining.getKey(), refining.getValue()));
} catch (Exception ex) {
LOGGER.error("Failed to register DeviceFactoryImpl for " + refining.getKey(), ex);
}
}
}
/**
* Starts up network manager, network, context and discovery manager.
*
* @return true if startup was success.
*/
public boolean startup() {
networkManager.startup();
context.addDeviceListener(this);
while (true) {
if (networkManager.getDriverStatus() == DriverStatus.NETWORK_READY) {
break;
}
if (networkManager.getDriverStatus() == DriverStatus.CLOSED) {
return false;
}
try {
Thread.sleep(100);
} catch (final InterruptedException e) {
return false;
}
}
ApplicationFrameworkLayer.getAFLayer(networkManager).createDefaultSendingEndPoint();
/* disable permit join by default */
permitJoin(false);
discoveryManager.startup();
return true;
}
    /**
     * Return true if initial networking browsing based on associations is complete.
     * Delegates to the discovery manager.
     *
     * @return true if initial network browsing is complete.
     */
    public boolean isInitialBrowsingComplete() {
        return discoveryManager.isInitialNetworkBrowsingComplete();
    }
    /**
     * Shuts down network manager, network, context and discovery manager.
     * Listeners are detached first so no callbacks arrive during teardown.
     */
    public void shutdown() {
        context.removeDeviceListener(this);
        network.removeEndpointListener(this);
        discoveryManager.shutdown();
        networkManager.shutdown();
    }
/**
* Changes the permit join state.
*
* @param joinState boolean join state, true for enabled indefinetly, false for disabled
*
* @return true if success
*/
public boolean permitJoin(boolean joinState) {
if (joinState) {
return sendPermitJoin((byte)0xFF);
} else {
return sendPermitJoin((byte)0);
}
}
/**
* Changes the permit join state with a timeout duration.
*
* @param durationSeconds join duration in seconds, from 1-254
*
* @return true if success
*/
public boolean permitJoin(int durationSeconds) {
if (durationSeconds < 1 || durationSeconds > 254) {
LOGGER.error("permitJoin durationSeconds out of range: {}", durationSeconds);
return false;
}
return sendPermitJoin((byte)durationSeconds);
}
    /**
     * Sends the permit join state to routers and the coordinator.
     *
     * @param data join duration payload (0 = disabled, 0xFF = enabled indefinitely)
     *
     * @return true if success
     */
    private boolean sendPermitJoin(byte data) {
        ZDO_MGMT_PERMIT_JOIN_RSP result;
        // ZDO address modes; presumably 0x0F = broadcast, 0x02 = 16-bit unicast
        // -- TODO confirm against the ZTool/ZDO documentation.
        final byte AddrBroadcast = 0x0F;
        final byte AddrUnicast = 0x02;
        LOGGER.debug("Sending permit join with data: {}", data);
        /* Notify routers of permit join change; don't check result because they're not obligated to respond */
        result = networkManager.sendPermitJoinRequest(new ZDO_MGMT_PERMIT_JOIN_REQ(AddrBroadcast, ZToolAddress16.ZCZR_BROADCAST, data, 1));
        /* Notify coordinator of permit join change */
        result = networkManager.sendPermitJoinRequest(new ZDO_MGMT_PERMIT_JOIN_REQ(AddrUnicast, new ZToolAddress16(0, 0), data, 1));
        // Only the coordinator's response is checked; Status 0 means success.
        if (result == null || result.Status != 0) {
            LOGGER.error("Error sending ZDO_MGMT_PERMIT_JOIN_REQ");
            return false;
        }
        return true;
    }
/**
* Serializes network state.
* @return the network state
*/
public String serializeNetworkState() {
final NetworkStateSerializer networkStateSerializer = new NetworkStateSerializer();
return networkStateSerializer.serialize(network);
}
/**
* Deserialize network state.
* @param networkState the network state
*/
public void deserializeNetworkState(final String networkState) {
final NetworkStateSerializer networkStateSerializer = new NetworkStateSerializer();
networkStateSerializer.deserialize(networkManager, network, networkState);
}
    /**
     * Gets ZigBee network manager (assigned in the constructor, never null).
     *
     * @return the ZigBee network manager.
     */
    public ZigBeeNetworkManagerImpl getZigBeeNetworkManager() {
        return networkManager;
    }
    /**
     * Gets ZigBee discovery manager (assigned in the constructor, never null).
     *
     * @return the ZigBee discovery manager.
     */
    public ZigBeeDiscoveryManager getZigBeeDiscoveryManager() {
        return discoveryManager;
    }
    /**
     * Gets ZigBee proxy context holding the known devices and factories.
     *
     * @return the ZigBee proxy context.
     */
    public ZigBeeApiContext getZigBeeApiContext() {
        return context;
    }
    /**
     * Gets ZigBee network model this API instance listens on.
     *
     * @return the ZigBee network.
     */
    public ZigBeeNetwork getZigBeeNetwork() {
        return network;
    }
    /**
     * Gets a device by its endpoint ID (delegates to the API context;
     * presumably returns null for unknown IDs -- confirm in ZigBeeApiContext).
     *
     * @param endPointId the endpoint ID
     * @return the matching device
     */
    public Device getDevice(String endPointId) {
        return context.getDevice(endPointId);
    }
    /**
     * Gets all currently known devices (delegates to the API context).
     *
     * @return the list of devices
     */
    public List<Device> getDevices() {
        return context.getDevices();
    }
    /**
     * Registers a listener for device add/update/remove events.
     *
     * @param deviceListener the listener to add
     */
    public void addDeviceListener(DeviceListener deviceListener) {
        context.addDeviceListener(deviceListener);
    }
    /**
     * Unregisters a previously added device listener.
     *
     * @param deviceListener the listener to remove
     */
    public void removeDeviceListener(DeviceListener deviceListener) {
        context.removeDeviceListener(deviceListener);
    }
@Override
public void endpointAdded(final ZigBeeEndpoint endpoint) {
final DeviceFactory factory = context.getBestDeviceProxyFactory(endpoint);
if (factory == null) { // pending services
LOGGER.warn("No proxy for ZigBee endpoint {} found.", endpoint.getDeviceTypeId());
return;
}
final DeviceBase haDevice = factory.getInstance(endpoint);
context.addDevice(haDevice);
LOGGER.trace("Endpoint added: " + endpoint.getEndpointId());
}
@Override
public void endpointUpdated(final ZigBeeEndpoint endpoint) {
LOGGER.trace("Endpoint updated: " + endpoint.getEndpointId());
final Device device = context.getDevice(endpoint.getEndpointId());
if (device != null) {
context.updateDevice(device);
}
}
@Override
public void endpointRemoved(final ZigBeeEndpoint endpoint) {
LOGGER.trace("Endpoint removed: " + endpoint.getEndpointId());
final Device device = context.getDevice(endpoint.getEndpointId());
if (device != null) {
context.removeDevice(device);
}
}
    @Override
    public void deviceAdded(final Device device) {
        // Log-only hook: a device proxy was created for an endpoint.
        LOGGER.debug(device.getClass().getSimpleName() +
                " added: " + device.getEndpoint().getEndpointId());
    }
    @Override
    public void deviceUpdated(final Device device) {
        // Log-only hook: an existing device proxy was refreshed.
        LOGGER.trace(device.getClass().getSimpleName() +
                " updated: " + device.getEndpoint().getEndpointId());
    }
    @Override
    public void deviceRemoved(final Device device) {
        // Log-only hook: a device proxy was removed.
        LOGGER.debug(device.getClass().getSimpleName() +
                " removed: " + device.getEndpoint().getEndpointId());
    }
}
| zigbee-api/src/main/java/org/bubblecloud/zigbee/ZigBeeApi.java | /**
* Copyright 2013 Tommi S.E. Laukkanen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bubblecloud.zigbee;
import org.bubblecloud.zigbee.network.EndpointListener;
import org.bubblecloud.zigbee.network.ZigBeeEndpoint;
import org.bubblecloud.zigbee.network.discovery.ZigBeeDiscoveryManager;
import org.bubblecloud.zigbee.network.impl.NetworkStateSerializer;
import org.bubblecloud.zigbee.network.impl.ZigBeeNetwork;
import org.bubblecloud.zigbee.network.model.DiscoveryMode;
import org.bubblecloud.zigbee.network.model.DriverStatus;
import org.bubblecloud.zigbee.network.model.NetworkMode;
import org.bubblecloud.zigbee.network.packet.ZToolAddress16;
import org.bubblecloud.zigbee.network.packet.zdo.ZDO_MGMT_PERMIT_JOIN_REQ;
import org.bubblecloud.zigbee.network.packet.zdo.ZDO_MGMT_PERMIT_JOIN_RSP;
import org.bubblecloud.zigbee.api.DeviceListener;
import org.bubblecloud.zigbee.network.impl.ApplicationFrameworkLayer;
import org.bubblecloud.zigbee.api.*;
import org.bubblecloud.zigbee.api.device.generic.*;
import org.bubblecloud.zigbee.api.device.hvac.Pump;
import org.bubblecloud.zigbee.api.device.hvac.TemperatureSensor;
import org.bubblecloud.zigbee.api.device.lighting.*;
import org.bubblecloud.zigbee.api.device.security_safety.IASAncillaryControlEquipment;
import org.bubblecloud.zigbee.api.device.security_safety.IASControlAndIndicatingEquipment;
import org.bubblecloud.zigbee.api.device.security_safety.IAS_Warning;
import org.bubblecloud.zigbee.api.device.security_safety.IAS_Zone;
import org.bubblecloud.zigbee.api.device.impl.*;
import org.bubblecloud.zigbee.api.DeviceBase;
import org.bubblecloud.zigbee.network.port.ZigBeeNetworkManagerImpl;
import org.bubblecloud.zigbee.network.port.ZigBeePort;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* ZigBee Application Interface.
* @author <a href="mailto:[email protected]">Tommi S.E. Laukkanen</a>
* @author <a href="mailto:[email protected]">Chris Hatton</a>
*/
public class ZigBeeApi implements EndpointListener, DeviceListener {
/**
* The logger.
*/
private final static Logger LOGGER = LoggerFactory.getLogger(ZigBeeDiscoveryManager.class);
/**
* The ZigBee network manager.
*/
private final ZigBeeNetworkManagerImpl networkManager;
/**
* The ZigBee discovery manager.
*/
private final ZigBeeDiscoveryManager discoveryManager;
/**
* The ZigBee context.
*/
private ZigBeeApiContext context;
/**
* The zigbee network.
*/
private ZigBeeNetwork network;
/**
* Constructor to configure the port interface.
*
* @param port the ZigBee interface port (reference implementation provided by the zigbee4java-serialPort module)
* @param pan the pan
* @param channel the channel
* @param discoveryModes the discovery modes
* @param resetNetwork the flag indicating network reset on startup
*/
public ZigBeeApi(final ZigBeePort port, final int pan, final int channel,
final EnumSet<DiscoveryMode> discoveryModes, final boolean resetNetwork) {
networkManager = new ZigBeeNetworkManagerImpl(port,
NetworkMode.Coordinator, pan, channel, resetNetwork, 2500L);
discoveryManager = new ZigBeeDiscoveryManager(networkManager, discoveryModes);
}
/**
* Constructor to configure the port interface.
*
* @param port the ZigBee interface port (reference implementation provided by the zigbee4java-serialPort module)
* @param pan the pan
* @param channel the channel
* @param resetNetwork the flag indicating network reset on startup
*/
public ZigBeeApi(final ZigBeePort port, final int pan, final int channel,
final boolean resetNetwork, final EnumSet<DiscoveryMode> discoveryModes) {
networkManager = new ZigBeeNetworkManagerImpl(port, NetworkMode.Coordinator, pan, channel, resetNetwork, 2500L);
discoveryManager = new ZigBeeDiscoveryManager(networkManager, discoveryModes);
network = ApplicationFrameworkLayer.getAFLayer(networkManager).getZigBeeNetwork();
network.addEndpointListenerListener(this);
context = new ZigBeeApiContext();
final ClusterFactory clusterFactory = new ClusterFactoryImpl(context);
context.setClusterFactory(clusterFactory);
final Map<Class<?>, Class<?>> deviceIntefaceImplemetnationMap = new HashMap<Class<?>, Class<?>>();
deviceIntefaceImplemetnationMap.put(ColorDimmableLight.class, ColorDimmableLightDevice.class);
deviceIntefaceImplemetnationMap.put(DimmableLight.class, DimmableLightDevice.class);
deviceIntefaceImplemetnationMap.put(IAS_Zone.class, IAS_ZoneDevice.class);
deviceIntefaceImplemetnationMap.put(IASAncillaryControlEquipment.class, IASAncillaryControlEquipmentDevice.class);
deviceIntefaceImplemetnationMap.put(IASControlAndIndicatingEquipment.class, IASControlAndIndicatingEquipmentDevice.class);
deviceIntefaceImplemetnationMap.put(LevelControlSwitch.class, LevelControlSwitchDevice.class);
deviceIntefaceImplemetnationMap.put(LightSensor.class, LightSensorDevice.class);
deviceIntefaceImplemetnationMap.put(MainsPowerOutlet.class, MainsPowerOutletDevice.class);
deviceIntefaceImplemetnationMap.put(OccupancySensor.class, OccupancySensorDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffLight.class, OnOffLightDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffLightSwitch.class, OnOffLightSwitchDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffOutput.class, OnOffOutputDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffSwitch.class, OnOffSwitchDevice.class);
deviceIntefaceImplemetnationMap.put(OnOffLight.class, OnOffLightDevice.class);
deviceIntefaceImplemetnationMap.put(Pump.class, PumpDevice.class);
deviceIntefaceImplemetnationMap.put(TemperatureSensor.class, TemperatureSensorDevice.class);
deviceIntefaceImplemetnationMap.put(IAS_Warning.class, IAS_Warning_Device.class);
deviceIntefaceImplemetnationMap.put(SimpleSensor.class, SimpleSensorDevice.class);
final Iterator<Map.Entry<Class<?>, Class<?>>> i = deviceIntefaceImplemetnationMap.entrySet().iterator();
while (i.hasNext()) {
Map.Entry<Class<?>, Class<?>> refining = i.next();
try {
context.getDeviceFactories().add(
new DeviceFactoryImpl(context, refining.getKey(), refining.getValue()));
} catch (Exception ex) {
LOGGER.error("Failed to register DeviceFactoryImpl for " + refining.getKey(), ex);
}
}
}
    /**
     * Starts up network manager, network, context and discovery manager.
     *
     * @return true if startup was success.
     */
    public boolean startup() {
        networkManager.startup();
        context.addDeviceListener(this);
        // Busy-wait (100 ms polls) until the driver reports a ready network.
        while (true) {
            if (networkManager.getDriverStatus() == DriverStatus.NETWORK_READY) {
                break;
            }
            if (networkManager.getDriverStatus() == DriverStatus.CLOSED) {
                // Driver gave up; startup failed.
                return false;
            }
            try {
                Thread.sleep(100);
            } catch (final InterruptedException e) {
                return false;
            }
        }
        ApplicationFrameworkLayer.getAFLayer(networkManager).createDefaultSendingEndPoint();
        discoveryManager.startup();
        return true;
    }
    /**
     * Return true if initial networking browsing based on associations is complete.
     * Delegates to the discovery manager.
     *
     * @return true if initial network browsing is complete.
     */
    public boolean isInitialBrowsingComplete() {
        return discoveryManager.isInitialNetworkBrowsingComplete();
    }
    /**
     * Shuts down network manager, network, context and discovery manager.
     * Listeners are detached first so no callbacks arrive during teardown.
     */
    public void shutdown() {
        context.removeDeviceListener(this);
        network.removeEndpointListener(this);
        discoveryManager.shutdown();
        networkManager.shutdown();
    }
/**
* Changes the permit join state.
*
* @param joinState boolean join state, true for enabled indefinetly, false for disabled
*
* @return true if success
*/
public boolean permitJoin(boolean joinState) {
if (joinState) {
return sendPermitJoin((byte)0xFF);
} else {
return sendPermitJoin((byte)0);
}
}
/**
* Changes the permit join state with a timeout duration.
*
* @param durationSeconds join duration in seconds, from 1-254
*
* @return true if success
*/
public boolean permitJoin(int durationSeconds) {
if (durationSeconds < 1 || durationSeconds > 254) {
LOGGER.error("permitJoin durationSeconds out of range: {}", durationSeconds);
return false;
}
return sendPermitJoin((byte)durationSeconds);
}
    /**
     * Sends the permit join state to routers and the coordinator.
     *
     * @param data join duration payload (0 = disabled, 0xFF = enabled indefinitely)
     *
     * @return true if success
     */
    private boolean sendPermitJoin(byte data) {
        ZDO_MGMT_PERMIT_JOIN_RSP result;
        // ZDO address modes; presumably 0x0F = broadcast, 0x02 = 16-bit unicast
        // -- TODO confirm against the ZTool/ZDO documentation.
        final byte AddrBroadcast = 0x0F;
        final byte AddrUnicast = 0x02;
        LOGGER.debug("Sending permit join with data: {}", data);
        /* Notify routers of permit join change; don't check result because they're not obligated to respond */
        result = networkManager.sendPermitJoinRequest(new ZDO_MGMT_PERMIT_JOIN_REQ(AddrBroadcast, ZToolAddress16.ZCZR_BROADCAST, data, 1));
        /* Notify coordinator of permit join change */
        result = networkManager.sendPermitJoinRequest(new ZDO_MGMT_PERMIT_JOIN_REQ(AddrUnicast, new ZToolAddress16(0, 0), data, 1));
        // Only the coordinator's response is checked; Status 0 means success.
        if (result == null || result.Status != 0) {
            LOGGER.error("Error sending ZDO_MGMT_PERMIT_JOIN_REQ");
            return false;
        }
        return true;
    }
/**
* Serializes network state.
* @return the network state
*/
public String serializeNetworkState() {
final NetworkStateSerializer networkStateSerializer = new NetworkStateSerializer();
return networkStateSerializer.serialize(network);
}
/**
* Deserialize network state.
* @param networkState the network state
*/
public void deserializeNetworkState(final String networkState) {
final NetworkStateSerializer networkStateSerializer = new NetworkStateSerializer();
networkStateSerializer.deserialize(networkManager, network, networkState);
}
    /**
     * Gets ZigBee network manager.
     *
     * @return the ZigBee network manager.
     */
    public ZigBeeNetworkManagerImpl getZigBeeNetworkManager() {
        return networkManager;
    }
    /**
     * Gets ZigBee discovery manager.
     *
     * @return the ZigBee discovery manager.
     */
    public ZigBeeDiscoveryManager getZigBeeDiscoveryManager() {
        return discoveryManager;
    }
    /**
     * Gets ZigBee proxy context.
     *
     * @return the ZigBee proxy context.
     */
    public ZigBeeApiContext getZigBeeApiContext() {
        return context;
    }
    /**
     * Gets ZigBee network.
     *
     * @return the ZigBee network.
     */
    public ZigBeeNetwork getZigBeeNetwork() {
        return network;
    }
    /**
     * Gets a device proxy by endpoint ID; delegates to the proxy context.
     *
     * @param endPointId the endpoint ID
     * @return the device as resolved by the proxy context
     */
    public Device getDevice(String endPointId) {
        return context.getDevice(endPointId);
    }
    /**
     * Gets all known device proxies; delegates to the proxy context.
     *
     * @return the devices
     */
    public List<Device> getDevices() {
        return context.getDevices();
    }
    /**
     * Registers a device listener with the proxy context.
     *
     * @param deviceListener the listener to add
     */
    public void addDeviceListener(DeviceListener deviceListener) {
        context.addDeviceListener(deviceListener);
    }
    /**
     * Unregisters a device listener from the proxy context.
     *
     * @param deviceListener the listener to remove
     */
    public void removeDeviceListener(DeviceListener deviceListener) {
        context.removeDeviceListener(deviceListener);
    }
@Override
public void endpointAdded(final ZigBeeEndpoint endpoint) {
final DeviceFactory factory = context.getBestDeviceProxyFactory(endpoint);
if (factory == null) { // pending services
LOGGER.warn("No proxy for ZigBee endpoint {} found.", endpoint.getDeviceTypeId());
return;
}
final DeviceBase haDevice = factory.getInstance(endpoint);
context.addDevice(haDevice);
LOGGER.trace("Endpoint added: " + endpoint.getEndpointId());
}
@Override
public void endpointUpdated(final ZigBeeEndpoint endpoint) {
LOGGER.trace("Endpoint updated: " + endpoint.getEndpointId());
final Device device = context.getDevice(endpoint.getEndpointId());
if (device != null) {
context.updateDevice(device);
}
}
@Override
public void endpointRemoved(final ZigBeeEndpoint endpoint) {
LOGGER.trace("Endpoint removed: " + endpoint.getEndpointId());
final Device device = context.getDevice(endpoint.getEndpointId());
if (device != null) {
context.removeDevice(device);
}
}
@Override
public void deviceAdded(final Device device) {
LOGGER.debug(device.getClass().getSimpleName() +
" added: " + device.getEndpoint().getEndpointId());
}
@Override
public void deviceUpdated(final Device device) {
LOGGER.trace(device.getClass().getSimpleName() +
" updated: " + device.getEndpoint().getEndpointId());
}
@Override
public void deviceRemoved(final Device device) {
LOGGER.debug(device.getClass().getSimpleName() +
" removed: " + device.getEndpoint().getEndpointId());
}
}
| Set startup state of permit join to disabled
This increases security as devices are only allowed to join the network
when explicitly allowed.
To re-enable permit join use one of the permitJoin methods.
| zigbee-api/src/main/java/org/bubblecloud/zigbee/ZigBeeApi.java | Set startup state of permit join to disabled This increases security as devices are only allowed to join the network when explicitly allowed. To re-enable permit join use one of the permitJoin methods. |
|
Java | apache-2.0 | 0297ff3e53f7057bfaa1c5516f61e3f30579b5a8 | 0 | dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android | package org.commcare.activities;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.util.Pair;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import org.commcare.CommCareApplication;
import org.commcare.dalvik.R;
import org.commcare.interfaces.HttpResponseProcessor;
import org.commcare.models.AndroidSessionWrapper;
import org.commcare.network.ModernHttpRequester;
import org.commcare.session.RemoteQuerySessionManager;
import org.commcare.suite.model.DisplayData;
import org.commcare.suite.model.DisplayUnit;
import org.commcare.suite.model.RemoteQueryDatum;
import org.commcare.suite.model.SessionDatum;
import org.commcare.tasks.SimpleHttpTask;
import org.commcare.tasks.templates.CommCareTaskConnector;
import org.commcare.views.ManagedUi;
import org.commcare.views.UiElement;
import org.commcare.views.dialogs.CustomProgressDialog;
import org.commcare.views.media.MediaLayout;
import org.javarosa.core.model.instance.ExternalDataInstance;
import org.javarosa.core.model.instance.TreeElement;
import org.javarosa.core.services.locale.Localization;
import org.javarosa.core.services.locale.Localizer;
import org.javarosa.xml.ElementParser;
import org.javarosa.xml.TreeElementParser;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.kxml2.io.KXmlParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Hashtable;
import java.util.Map;
/**
* Collects 'query datum' in the current session. Prompts user for query
* params, makes query to server and stores xml 'fixture' response into current
* session. Allows for 'case search and claim' workflow when used inside a
 * 'sync-request' entry in conjunction with entity select datum and sync
*
* @author Phillip Mates ([email protected]).
*/
@ManagedUi(R.layout.http_request_layout)
public class QueryRequestActivity
extends SaveSessionCommCareActivity<QueryRequestActivity>
implements HttpResponseProcessor {
private static final String TAG = QueryRequestActivity.class.getSimpleName();
private static final String ANSWERED_USER_PROMPTS_KEY = "answered_user_prompts";
private static final String IN_ERROR_STATE_KEY = "in-error-state-key";
private static final String ERROR_MESSAGE_KEY = "error-message-key";
@UiElement(value = R.id.request_button, locale = "query.button")
private Button queryButton;
@UiElement(value = R.id.error_message)
private TextView errorTextView;
private boolean inErrorState;
private String errorMessage;
private RemoteQuerySessionManager remoteQuerySessionManager;
private Hashtable<String, EditText> promptsBoxes = new Hashtable<>();
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Build the query session manager from the current session; this only
        // succeeds when the session's needed datum is a RemoteQueryDatum.
        remoteQuerySessionManager =
                buildQuerySessionManager(CommCareApplication._().getCurrentSessionWrapper());
        if (remoteQuerySessionManager == null) {
            Log.e(TAG, "Tried to launch remote query activity at wrong time in session.");
            setResult(RESULT_CANCELED);
            finish();
        } else {
            loadStateFromSavedInstance(savedInstanceState);
            setupUI();
        }
    }
    // Builds the prompt fields and wires the query button to fire the request.
    private void setupUI() {
        buildPromptUI();
        queryButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Capture the user's current answers before making the HTTP request.
                answerPrompts();
                makeQueryRequest();
            }
        });
    }
    // Creates one label + text input per user-input prompt declared by the query datum.
    private void buildPromptUI() {
        LinearLayout promptsLayout = (LinearLayout)findViewById(R.id.query_prompts);
        Hashtable<String, DisplayUnit> userInputDisplays =
                remoteQuerySessionManager.getNeededUserInputDisplays();
        for (Map.Entry<String, DisplayUnit> displayEntry : userInputDisplays.entrySet()) {
            buildPromptEntry(promptsLayout, displayEntry.getKey(), displayEntry.getValue());
        }
    }
    // Adds the label and edit box for a single prompt, pre-filling any previously
    // saved answer, and records the edit box for later answer collection.
    private void buildPromptEntry(LinearLayout promptsLayout,
                                  String promptId,
                                  DisplayUnit displayUnit) {
        Hashtable<String, String> userAnswers =
                remoteQuerySessionManager.getUserAnswers();
        promptsLayout.addView(createPromptEntry(displayUnit));
        EditText promptEditText = new EditText(this);
        if (userAnswers.containsKey(promptId)) {
            promptEditText.setText(userAnswers.get(promptId));
        }
        promptEditText.setBackgroundResource(R.drawable.login_edit_text);
        promptsLayout.addView(promptEditText);
        promptsBoxes.put(promptId, promptEditText);
    }
    // Copies non-empty text box contents into the query session manager as answers.
    private void answerPrompts() {
        for (Map.Entry<String, EditText> promptEntry : promptsBoxes.entrySet()) {
            String promptText = promptEntry.getValue().getText().toString();
            if (!"".equals(promptText)) {
                remoteQuerySessionManager.answerUserPrompt(promptEntry.getKey(), promptText);
            }
        }
    }
    // Validates the query URL and kicks off the HTTP request task; failures are
    // surfaced through the error banner instead of throwing.
    private void makeQueryRequest() {
        clearErrorState();
        URL url = null;
        String urlString = remoteQuerySessionManager.getBaseUrl();
        try {
            url = new URL(urlString);
        } catch (MalformedURLException e) {
            enterErrorState(Localization.get("post.malformed.url", urlString));
        }
        if (url != null) {
            SimpleHttpTask httpTask;
            try {
                httpTask = new SimpleHttpTask(this, url, remoteQuerySessionManager.getRawQueryParams(), false);
            } catch (ModernHttpRequester.PlainTextPasswordException e) {
                // Refuse to send credentials over a non-HTTPS connection.
                enterErrorState(Localization.get("post.not.using.https", url.toString()));
                return;
            }
            httpTask.connect((CommCareTaskConnector)this);
            httpTask.executeParallel();
        }
    }
    // Resets the error banner state before a new request attempt.
    private void clearErrorState() {
        errorMessage = "";
        inErrorState = false;
    }
    // Records the message and shows it in the error banner.
    private void enterErrorState(String message) {
        errorMessage = message;
        enterErrorState();
    }
    // Shows the current error message in the error banner and logs it.
    private void enterErrorState() {
        inErrorState = true;
        Log.e(TAG, errorMessage);
        errorTextView.setText(errorMessage);
        errorTextView.setVisibility(View.VISIBLE);
    }
    @Override
    protected void onSaveInstanceState(Bundle savedInstanceState) {
        super.onSaveInstanceState(savedInstanceState);
        // Persist current text box contents so they survive activity recreation.
        answerPrompts();
        savedInstanceState.putSerializable(ANSWERED_USER_PROMPTS_KEY,
                remoteQuerySessionManager.getUserAnswers());
        savedInstanceState.putString(ERROR_MESSAGE_KEY, errorMessage);
        savedInstanceState.putBoolean(IN_ERROR_STATE_KEY, inErrorState);
    }
    // Builds a RemoteQuerySessionManager for the session's needed datum, or null
    // when the session is not currently positioned at a remote query datum.
    private static RemoteQuerySessionManager buildQuerySessionManager(AndroidSessionWrapper sessionWrapper) {
        SessionDatum datum;
        try {
            datum = sessionWrapper.getSession().getNeededDatum();
        } catch (NullPointerException e) {
            // tried loading session info when it wasn't there
            // NOTE(review): NPE is caught here to signal "no session" -- presumably
            // thrown inside getSession()/getNeededDatum(); confirm before refactoring.
            return null;
        }
        if (datum instanceof RemoteQueryDatum) {
            return new RemoteQuerySessionManager((RemoteQueryDatum)datum, sessionWrapper.getEvaluationContext());
        } else {
            return null;
        }
    }
    // Restores error banner state and previously answered prompts after recreation.
    private void loadStateFromSavedInstance(Bundle savedInstanceState) {
        if (savedInstanceState != null) {
            errorMessage = savedInstanceState.getString(ERROR_MESSAGE_KEY);
            inErrorState = savedInstanceState.getBoolean(IN_ERROR_STATE_KEY);
            Hashtable<String, String> answeredPrompts =
                    (Hashtable<String, String>)savedInstanceState.getSerializable(ANSWERED_USER_PROMPTS_KEY);
            if (answeredPrompts != null) {
                for (Map.Entry<String, String> entry : answeredPrompts.entrySet()) {
                    remoteQuerySessionManager.answerUserPrompt(entry.getKey(), entry.getValue());
                }
            }
        }
    }
    // Builds the label view (text plus optional audio/image) for a single prompt.
    private MediaLayout createPromptEntry(DisplayUnit display) {
        DisplayData mData = display.evaluate();
        String str = Localizer.processArguments(mData.getName(), new String[]{""}).trim();
        TextView text = new TextView(getApplicationContext());
        text.setText(str);
        int padding = (int)getResources().getDimension(R.dimen.help_text_padding);
        // NOTE(review): bottom padding is a hard-coded 7px while the layout below
        // uses the dimension resource -- looks intentional but worth confirming.
        text.setPadding(0, 0, 0, 7);
        MediaLayout helpLayout = new MediaLayout(this);
        helpLayout.setAVT(text, mData.getAudioURI(), mData.getImageURI(), null, null);
        helpLayout.setPadding(padding, padding, padding, padding);
        text.setTextColor(Color.BLACK);
        return helpLayout;
    }
    // HttpResponseProcessor success callback: parses the response XML into an
    // external data instance and stores it in the session as the query datum.
    @Override
    public void processSuccess(int responseCode, InputStream responseData) {
        Pair<ExternalDataInstance, String> instanceOrError =
                buildExternalDataInstance(responseData,
                        remoteQuerySessionManager.getStorageInstanceName());
        if (instanceOrError.first == null) {
            enterErrorState(Localization.get("query.response.format.error", instanceOrError.second));
        } else {
            CommCareApplication._().getCurrentSession().setQueryDatum(instanceOrError.first);
            setResult(RESULT_OK);
            finish();
        }
    }
    /**
     * Parses an XML stream into an external data instance.
     *
     * @param instanceStream XML stream to parse
     * @param instanceId     ID to register the instance under
     * @return (instance, "") on success; (null, error message) on parse failure
     */
    public static Pair<ExternalDataInstance, String> buildExternalDataInstance(InputStream instanceStream,
                                                                              String instanceId) {
        TreeElement root;
        try {
            KXmlParser baseParser = ElementParser.instantiateParser(instanceStream);
            root = new TreeElementParser(baseParser, 0, instanceId).parse();
        } catch (InvalidStructureException | IOException
                | XmlPullParserException | UnfullfilledRequirementsException e) {
            return new Pair<>(null, e.getMessage());
        }
        return new Pair<>(ExternalDataInstance.buildFromRemote(instanceId, root), "");
    }
@Override
public void processRedirection(int responseCode) {
enterErrorState(Localization.get("post.redirection.error", responseCode + ""));
}
@Override
public void processClientError(int responseCode) {
enterErrorState(Localization.get("post.client.error", responseCode + ""));
}
@Override
public void processServerError(int responseCode) {
enterErrorState(Localization.get("post.server.error", responseCode + ""));
}
@Override
public void processOther(int responseCode) {
enterErrorState(Localization.get("post.unknown.response", responseCode + ""));
}
@Override
public void handleIOException(IOException exception) {
enterErrorState(Localization.get("post.io.error", exception.getMessage()));
}
    // Supplies the progress dialog shown while the HTTP query task runs.
    @Override
    public CustomProgressDialog generateProgressDialog(int taskId) {
        String title, message;
        switch (taskId) {
            case SimpleHttpTask.SIMPLE_HTTP_TASK_ID:
                title = Localization.get("query.dialog.title");
                message = Localization.get("query.dialog.body");
                break;
            default:
                Log.w(TAG, "taskId passed to generateProgressDialog does not match "
                        + "any valid possibilities in CommCareHomeActivity");
                return null;
        }
        return CustomProgressDialog.newInstance(title, message, taskId);
    }
}
| app/src/org/commcare/activities/QueryRequestActivity.java | package org.commcare.activities;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.util.Pair;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import org.commcare.CommCareApplication;
import org.commcare.dalvik.R;
import org.commcare.interfaces.HttpResponseProcessor;
import org.commcare.models.AndroidSessionWrapper;
import org.commcare.network.ModernHttpRequester;
import org.commcare.session.RemoteQuerySessionManager;
import org.commcare.suite.model.DisplayData;
import org.commcare.suite.model.DisplayUnit;
import org.commcare.suite.model.RemoteQueryDatum;
import org.commcare.suite.model.SessionDatum;
import org.commcare.tasks.SimpleHttpTask;
import org.commcare.tasks.templates.CommCareTaskConnector;
import org.commcare.views.ManagedUi;
import org.commcare.views.UiElement;
import org.commcare.views.dialogs.CustomProgressDialog;
import org.commcare.views.media.MediaLayout;
import org.javarosa.core.model.instance.ExternalDataInstance;
import org.javarosa.core.model.instance.TreeElement;
import org.javarosa.core.services.locale.Localization;
import org.javarosa.core.services.locale.Localizer;
import org.javarosa.xml.ElementParser;
import org.javarosa.xml.TreeElementParser;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Hashtable;
import java.util.Map;
/**
* Collects 'query datum' in the current session. Prompts user for query
* params, makes query to server and stores xml 'fixture' response into current
* session. Allows for 'case search and claim' workflow when used inside a
* 'sync-request' entry in conjuction with entity select datum and sync
*
* @author Phillip Mates ([email protected]).
*/
@ManagedUi(R.layout.http_request_layout)
public class QueryRequestActivity
extends SaveSessionCommCareActivity<QueryRequestActivity>
implements HttpResponseProcessor {
private static final String TAG = QueryRequestActivity.class.getSimpleName();
private static final String ANSWERED_USER_PROMPTS_KEY = "answered_user_prompts";
private static final String IN_ERROR_STATE_KEY = "in-error-state-key";
private static final String ERROR_MESSAGE_KEY = "error-message-key";
@UiElement(value = R.id.request_button, locale = "query.button")
private Button queryButton;
@UiElement(value = R.id.error_message)
private TextView errorTextView;
private boolean inErrorState;
private String errorMessage;
private RemoteQuerySessionManager remoteQuerySessionManager;
private Hashtable<String, EditText> promptsBoxes = new Hashtable<>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
remoteQuerySessionManager =
buildQuerySessionManager(CommCareApplication._().getCurrentSessionWrapper());
if (remoteQuerySessionManager == null) {
Log.e(TAG, "Tried to launch remote query activity at wrong time in session.");
setResult(RESULT_CANCELED);
finish();
} else {
loadStateFromSavedInstance(savedInstanceState);
setupUI();
}
}
private void setupUI() {
buildPromptUI();
queryButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
answerPrompts();
makeQueryRequest();
}
});
}
private void buildPromptUI() {
LinearLayout promptsLayout = (LinearLayout)findViewById(R.id.query_prompts);
Hashtable<String, DisplayUnit> userInputDisplays =
remoteQuerySessionManager.getNeededUserInputDisplays();
for (Map.Entry<String, DisplayUnit> displayEntry : userInputDisplays.entrySet()) {
buildPromptEntry(promptsLayout, displayEntry.getKey(), displayEntry.getValue());
}
}
private void buildPromptEntry(LinearLayout promptsLayout, String promptId, DisplayUnit displayUnit) {
Hashtable<String, String> userAnswers = remoteQuerySessionManager.getUserAnswers();
promptsLayout.addView(createPromptEntry(displayUnit));
EditText promptEditText = new EditText(this);
if (userAnswers.containsKey(promptId)) {
promptEditText.setText(userAnswers.get(promptId));
}
promptEditText.setBackgroundResource(R.drawable.login_edit_text);
promptsLayout.addView(promptEditText);
promptsBoxes.put(promptId, promptEditText);
}
private void answerPrompts() {
for (Map.Entry<String, EditText> promptEntry : promptsBoxes.entrySet()) {
String promptText = promptEntry.getValue().getText().toString();
if (!"".equals(promptText)) {
remoteQuerySessionManager.answerUserPrompt(promptEntry.getKey(), promptText);
}
}
}
private void makeQueryRequest() {
errorMessage = "";
inErrorState = false;
URL url = null;
String urlString = remoteQuerySessionManager.getBaseUrl();
try {
url = new URL(urlString);
} catch (MalformedURLException e) {
enterErrorState(Localization.get("post.malformed.url", urlString));
}
if (url != null) {
SimpleHttpTask httpTask;
try {
httpTask = new SimpleHttpTask(this, url, remoteQuerySessionManager.getRawQueryParams(), false);
} catch (ModernHttpRequester.PlainTextPasswordException e) {
enterErrorState(Localization.get("post.not.using.https", url.toString()));
return;
}
httpTask.connect((CommCareTaskConnector)this);
httpTask.executeParallel();
}
}
private void enterErrorState(String message) {
errorMessage = message;
enterErrorState();
}
private void enterErrorState() {
inErrorState = true;
Log.e(TAG, errorMessage);
errorTextView.setText(errorMessage);
errorTextView.setVisibility(View.VISIBLE);
}
@Override
protected void onSaveInstanceState(Bundle savedInstanceState) {
super.onSaveInstanceState(savedInstanceState);
answerPrompts();
savedInstanceState.putSerializable(ANSWERED_USER_PROMPTS_KEY,
remoteQuerySessionManager.getUserAnswers());
savedInstanceState.putString(ERROR_MESSAGE_KEY, errorMessage);
savedInstanceState.putBoolean(IN_ERROR_STATE_KEY, inErrorState);
}
private static RemoteQuerySessionManager buildQuerySessionManager(AndroidSessionWrapper sessionWrapper) {
SessionDatum datum;
try {
datum = sessionWrapper.getSession().getNeededDatum();
} catch (NullPointerException e) {
// tried loading session info when it wasn't there
return null;
}
if (datum instanceof RemoteQueryDatum) {
return new RemoteQuerySessionManager((RemoteQueryDatum)datum, sessionWrapper.getEvaluationContext());
} else {
return null;
}
}
private void loadStateFromSavedInstance(Bundle savedInstanceState) {
if (savedInstanceState != null) {
errorMessage = savedInstanceState.getString(ERROR_MESSAGE_KEY);
inErrorState = savedInstanceState.getBoolean(IN_ERROR_STATE_KEY);
Hashtable<String, String> answeredPrompts =
(Hashtable<String, String>)savedInstanceState.getSerializable(ANSWERED_USER_PROMPTS_KEY);
if (answeredPrompts != null) {
for (Map.Entry<String, String> entry : answeredPrompts.entrySet()) {
remoteQuerySessionManager.answerUserPrompt(entry.getKey(), entry.getValue());
}
}
}
}
private MediaLayout createPromptEntry(DisplayUnit display) {
DisplayData mData = display.evaluate();
String str = Localizer.processArguments(mData.getName(), new String[]{""}).trim();
TextView text = new TextView(getApplicationContext());
text.setText(str);
int padding = (int)getResources().getDimension(R.dimen.help_text_padding);
text.setPadding(0, 0, 0, 7);
MediaLayout helpLayout = new MediaLayout(this);
helpLayout.setAVT(text, mData.getAudioURI(), mData.getImageURI(), null, null);
helpLayout.setPadding(padding, padding, padding, padding);
text.setTextColor(Color.BLACK);
return helpLayout;
}
@Override
public void processSuccess(int responseCode, InputStream responseData) {
Pair<ExternalDataInstance, String> instanceOrError =
buildExternalDataInstance(responseData,
remoteQuerySessionManager.getStorageInstanceName());
if (instanceOrError.first == null) {
enterErrorState(Localization.get("query.response.format.error", instanceOrError.second));
} else {
CommCareApplication._().getCurrentSession().setQueryDatum(instanceOrError.first);
setResult(RESULT_OK);
finish();
}
}
public static Pair<ExternalDataInstance, String> buildExternalDataInstance(InputStream instanceStream, String instanceId) {
TreeElement root;
try {
root = new TreeElementParser(ElementParser.instantiateParser(instanceStream), 0, instanceId).parse();
} catch (InvalidStructureException | IOException
| XmlPullParserException | UnfullfilledRequirementsException e) {
return new Pair<>(null, e.getMessage());
}
return new Pair<>(ExternalDataInstance.buildFromRemote(instanceId, root), "");
}
@Override
public void processRedirection(int responseCode) {
enterErrorState(Localization.get("post.redirection.error", responseCode + ""));
}
@Override
public void processClientError(int responseCode) {
enterErrorState(Localization.get("post.client.error", responseCode + ""));
}
@Override
public void processServerError(int responseCode) {
enterErrorState(Localization.get("post.server.error", responseCode + ""));
}
@Override
public void processOther(int responseCode) {
enterErrorState(Localization.get("post.unknown.response", responseCode + ""));
}
@Override
public void handleIOException(IOException exception) {
enterErrorState(Localization.get("post.io.error", exception.getMessage()));
}
@Override
public CustomProgressDialog generateProgressDialog(int taskId) {
String title, message;
switch (taskId) {
case SimpleHttpTask.SIMPLE_HTTP_TASK_ID:
title = Localization.get("query.dialog.title");
message = Localization.get("query.dialog.body");
break;
default:
Log.w(TAG, "taskId passed to generateProgressDialog does not match "
+ "any valid possibilities in CommCareHomeActivity");
return null;
}
return CustomProgressDialog.newInstance(title, message, taskId);
}
}
| Line length
| app/src/org/commcare/activities/QueryRequestActivity.java | Line length |
|
Java | apache-2.0 | 2d2a682fd640d6ec814872b13ed69748e3b404f3 | 0 | mtransitapps/ca-red-deer-transit-bus-parser | package org.mtransit.parser.ca_red_deer_transit_bus;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.ColorUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.MTLog;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MDirectionType;
import org.mtransit.parser.mt.data.MInboundType;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// https://data.reddeer.ca/gtfsdatafeed
// https://data.reddeer.ca/data/GTFS/RD_GTFS.zip
// OTHER: https://webmap.reddeer.ca/transit/google_transit.zip
public class RedDeerTransitBusAgencyTools extends DefaultAgencyTools {
	/**
	 * Entry point: generates the Red Deer Transit bus data.
	 * When launched without arguments, falls back to the default input GTFS
	 * zip and the Android app's raw resource output directory.
	 */
	public static void main(String[] args) {
		if (args == null || args.length == 0) {
			args = new String[3];
			args[0] = "input/gtfs.zip";
			args[1] = "../../mtransitapps/ca-red-deer-transit-bus-android/res/raw/";
			args[2] = ""; // files-prefix
		}
		new RedDeerTransitBusAgencyTools().start(args);
	}
	// Service IDs considered useful for the current schedule window;
	// populated in start() and consulted by the exclude* filters below.
	private HashSet<String> serviceIds;
	@Override
	public void start(String[] args) {
		MTLog.log("Generating Red Deer Transit bus data...");
		long start = System.currentTimeMillis();
		// Pre-scan the feed to find the service IDs worth keeping.
		this.serviceIds = extractUsefulServiceIds(args, this, true);
		super.start(args);
		MTLog.log("Generating Red Deer Transit bus data... DONE in %s.", Utils.getPrettyDuration(System.currentTimeMillis() - start));
	}
	// True when the useful-service-ID scan found nothing to keep.
	@Override
	public boolean excludingAll() {
		return this.serviceIds != null && this.serviceIds.isEmpty();
	}
	// Drops calendars whose service ID was not deemed useful.
	@Override
	public boolean excludeCalendar(GCalendar gCalendar) {
		if (this.serviceIds != null) {
			return excludeUselessCalendar(gCalendar, this.serviceIds);
		}
		return super.excludeCalendar(gCalendar);
	}
	// Drops calendar dates whose service ID was not deemed useful.
	@Override
	public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
		if (this.serviceIds != null) {
			return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
		}
		return super.excludeCalendarDate(gCalendarDates);
	}
	// Drops trips whose service ID was not deemed useful.
	@Override
	public boolean excludeTrip(GTrip gTrip) {
		if (this.serviceIds != null) {
			return excludeUselessTrip(gTrip, this.serviceIds);
		}
		return super.excludeTrip(gTrip);
	}
	// No route-level filtering beyond the default behavior.
	@Override
	public boolean excludeRoute(GRoute gRoute) {
		return super.excludeRoute(gRoute);
	}
	@Override
	public Integer getAgencyRouteType() {
		return MAgency.ROUTE_TYPE_BUS;
	}
	// Matches the numeric portion of a route short name.
	private static final Pattern DIGITS = Pattern.compile("[\\d]+");
	private static final String A = "A";
	// Offset added to the numeric part of "...A" route short names so they
	// get distinct stable IDs (e.g. "12A" -> 10012).
	private static final long ROUTE_ID_ENDS_WITH_A = 10_000L;
	@Override
	public long getRouteId(GRoute gRoute) {
		if (Utils.isDigitsOnly(gRoute.getRouteShortName())) {
			return Long.parseLong(gRoute.getRouteShortName()); // use route short name as route ID
		}
		Matcher matcher = DIGITS.matcher(gRoute.getRouteShortName());
		if (matcher.find()) {
			long id = Long.parseLong(matcher.group());
			if (gRoute.getRouteShortName().endsWith(A)) {
				return ROUTE_ID_ENDS_WITH_A + id;
			}
		}
		// Fail fast on any short name not handled above (e.g. a "...B" suffix);
		// route IDs must stay stable, so unknown patterns are a hard error.
		MTLog.logFatal("Unexpected route ID for %s!", gRoute);
		return -1L;
	}
	private static final String YELLOW_SCHOOL_BUS_COLOR = "FFD800";
	// Route colors are hard-coded per route short name because the feed's
	// route_color values are missing or plain white for most routes.
	@SuppressWarnings("DuplicateBranchesInSwitch")
	@Override
	public String getRouteColor(GRoute gRoute) {
		String routeColor = gRoute.getRouteColor();
		// Treat white as "no color" so the hard-coded table below applies.
		if (ColorUtils.WHITE.equalsIgnoreCase(routeColor)) {
			routeColor = null;
		}
		if (StringUtils.isEmpty(routeColor)) {
			if (Utils.isDigitsOnly(gRoute.getRouteShortName())) {
				int rsn = Integer.parseInt(gRoute.getRouteShortName());
				switch (rsn) {
				// @formatter:off
				case 1: return "B60000";
				case 2: return "32BEBB";
				case 3: return "B48ACA";
				case 4: return "FE0002";
				case 5: return "793E95";
				case 6: return "000000";
				case 7: return "0000B6";
				case 8: return "2DA9AA";
				case 9: return "8E8E8E";
				case 10: return "E95393";
				case 11: return "FFB61A";
				case 12: return "217E7D";
				case 20: return YELLOW_SCHOOL_BUS_COLOR;
				case 21: return YELLOW_SCHOOL_BUS_COLOR;
				case 22: return YELLOW_SCHOOL_BUS_COLOR;
				case 23: return YELLOW_SCHOOL_BUS_COLOR;
				case 24: return YELLOW_SCHOOL_BUS_COLOR;
				case 25: return YELLOW_SCHOOL_BUS_COLOR;
				case 26: return YELLOW_SCHOOL_BUS_COLOR;
				case 27: return YELLOW_SCHOOL_BUS_COLOR;
				case 28: return YELLOW_SCHOOL_BUS_COLOR;
				case 29: return YELLOW_SCHOOL_BUS_COLOR;
				case 30: return YELLOW_SCHOOL_BUS_COLOR;
				case 31: return YELLOW_SCHOOL_BUS_COLOR;
				case 32: return YELLOW_SCHOOL_BUS_COLOR;
				case 33: return YELLOW_SCHOOL_BUS_COLOR;
				case 34: return YELLOW_SCHOOL_BUS_COLOR;
				case 35: return YELLOW_SCHOOL_BUS_COLOR;
				case 36: return YELLOW_SCHOOL_BUS_COLOR;
				case 37: return YELLOW_SCHOOL_BUS_COLOR;
				case 38: return YELLOW_SCHOOL_BUS_COLOR;
				case 39: return YELLOW_SCHOOL_BUS_COLOR;
				case 40: return YELLOW_SCHOOL_BUS_COLOR;
				case 41: return YELLOW_SCHOOL_BUS_COLOR;
				case 50: return "000000";
				case 51: return "5DCDF3";
				case 52: return "01B601";
				case 53: return "FE0000";
				case 54: return "6A3683";
				case 100: return "016E01";
				case 101: return "0000B6";
				case 103: return null; // TODO?
				case 104: return null; // TODO?
				// @formatter:on
				}
			}
			if ("12A".equalsIgnoreCase(gRoute.getRouteShortName())) {
				return "AE7B10";
			}
			if ("35A".equalsIgnoreCase(gRoute.getRouteShortName())) {
				return YELLOW_SCHOOL_BUS_COLOR;
			}
			// Unknown route: hard error so new routes get an explicit color entry.
			MTLog.logFatal("Unexpected route color '%s'", gRoute);
			return null;
		}
		return routeColor;
	}
private static final Pattern ROUTE_RSN = Pattern.compile("(route [\\d]+[a-zA-Z]?)", Pattern.CASE_INSENSITIVE);
private static final String _SLASH_ = " / ";
@SuppressWarnings("DuplicateBranchesInSwitch")
@Override
public String getRouteLongName(GRoute gRoute) {
String routeLongName = gRoute.getRouteLongName();
routeLongName = cleanRouteLongName(routeLongName);
if (StringUtils.isEmpty(routeLongName)) {
if (Utils.isDigitsOnly(gRoute.getRouteShortName())) {
int rsn = Integer.parseInt(gRoute.getRouteShortName());
switch (rsn) {
// @formatter:off
case 1: return "South Hl" + _SLASH_ + "Inglewood";
case 2: return "Oriole Pk" + _SLASH_ + "Johnstone Pk South";
case 3: return "College" + _SLASH_ + "Anders Pk";
case 4: return "Glendale" + _SLASH_ + "Kentwood (West)";
case 5: return "Rosedale South" + _SLASH_ + "Deer Pk";
case 6: return "Clearview Rdg" + _SLASH_ + "Timberlands";
case 7: return "Morrisroe" + _SLASH_ + "Vanier Woods";
case 8: return "Pines" + _SLASH_ + "Normandeau";
case 9: return "Eastview" + _SLASH_ + "Inglewood";
case 10: return "West Pk" + _SLASH_ + "Gaetz Ave South";
case 11: return "Johnstone Pk (North)" + _SLASH_ + "GH Dawe";
case 12: return "Gasoline Alley";
case 20: return "Lindsay Thurber" + _SLASH_ + "Oriole Pk";
case 21: return "Lindsay Thurber" + _SLASH_ + "Normandeau" + _SLASH_ + "Glendale";
case 22: return "Lindsay Thurber" + _SLASH_ + "Normandeau";
case 23: return "Lindsay Thurber" + _SLASH_ + "Eastview Ests ";
case 24: return "Lindsay Thurber" + _SLASH_ + "East Hl";
case 25: return "Lindsay Thurber" + _SLASH_ + "Johnstone Pk";
case 26: return "Hunting Hls" + _SLASH_ + "West Pk";
case 27: return "Hunting Hls" + _SLASH_ + "Eastview Ests";
case 28: return "Eastview Middle School" + _SLASH_ + "Eastview Ests";
case 29: return "Notre Dame" + _SLASH_ + "Hunting Hls" + _SLASH_ + "City Ctr Sorensen Sta";
case 30: return "City Ctr" + _SLASH_ + "Sorensen Sta";
case 31: return "Saint Joseph School" + _SLASH_ + "City Ctr" + _SLASH_ + "Sorensen Sta";
case 32: return "Central Middle School" + _SLASH_ + "Normandeau" + _SLASH_ + "Timberlands";
case 33: return "Lindsay Thurber" + _SLASH_ + "City Ctr" + _SLASH_ + "Sorensen Sta";
case 34: return "Saint Joseph School" + _SLASH_ + "Normandeau" + _SLASH_ + "Highland Grn";
case 35: return "Central Middle School" + _SLASH_ + "Fairview" + _SLASH_ + "Riverside Mdws";
case 36: return "City Ctr" + _SLASH_ + "Lazy Bus";
case 37: return "Saint Joseph School" + _SLASH_ + "Timberlands" + _SLASH_ + "Rosedale";
case 38: return "Clearview" + _SLASH_ + "Deer" + _SLASH_ + "Timberlands" + _SLASH_ + "Rosedale";
case 39: return "Hunting Hls" + _SLASH_ + "Eastview School" + _SLASH_ + "Morrisroe" + _SLASH_ + "Lancaster";
case 40: return "Saint Joseph School" + _SLASH_ + "Johnstone Pk" + _SLASH_ + "Kentwood";
case 41: return "Saint Joseph School" + _SLASH_ + "Kentwood" + _SLASH_ + "Glendale";
case 50: return "Edgar Ind Pk";
case 51: return "Gaetz Ave North" + _SLASH_ + "Riverside Ind";
case 52: return "Riverside Ind Pk" + _SLASH_ + "Olymel";
case 53: return "Riverside Ind Pk" + _SLASH_ + "Olymel";
case 54: return "Riverside Ind Pk" + _SLASH_ + "Olymel";
case 100: return "Lacombe Blackfalds Express";
case 101: return "Lacombe Blackfalds Local";
case 103: return "Springbrook, Penhold, Innisfail, Bower Mall";
case 104: return "Springbrook, Penhold, Innisfail, Bower Mall";
// @formatter:on
}
}
if ("12A".equalsIgnoreCase(gRoute.getRouteShortName())) {
return "Gasoline Alley" + _SLASH_ + "Springbrook";
}
if ("35A".equalsIgnoreCase(gRoute.getRouteShortName())) {
return "Central Middle School" + _SLASH_ + "Oriole Pk";
}
MTLog.logFatal("Unexpected route long name '%s'", gRoute);
return null;
}
return routeLongName;
}
/**
 * Normalizes a raw GTFS route long name: strips the "route ##" prefix,
 * shortens "Industrial" to "Ind", then applies the shared punctuation and
 * street-type cleanup helpers. Pass order matters and is preserved.
 */
private String cleanRouteLongName(String routeLongName) {
	String cleaned = ROUTE_RSN.matcher(routeLongName).replaceAll(StringUtils.EMPTY);
	cleaned = INDUSTRIAL.matcher(cleaned).replaceAll(INDUSTRIAL_REPLACEMENT);
	cleaned = CleanUtils.removePoints(cleaned);
	return CleanUtils.cleanStreetTypes(cleaned);
}
private static final String AGENCY_COLOR_RED = "BF311A"; // RED (from web site CSS)

// Agency-wide default color, used when a route does not define its own.
private static final String AGENCY_COLOR = AGENCY_COLOR_RED;

@Override
public String getAgencyColor() {
	return AGENCY_COLOR;
}
// Hard-coded trip specifications (head-sign text + ordered anchor stops per direction)
// for routes whose GTFS direction data cannot be used directly. Keyed by route ID
// (see getRouteId(); "12A" maps to 12 + ROUTE_ID_ENDS_WITH_A).
// Comment markers on stop lines: "==" shared segment, "!=" branch-specific stop,
// "<>" stop shared by both directions, "=>" end of a branch, "++" intermediate stop.
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;

static {
	HashMap<Long, RouteTripSpec> map2 = new HashMap<>();
	map2.put(1L, new RouteTripSpec(1L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Inglewood", // South Hl
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("935") // EB VANIER DR @ 30 AV
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("934"), // WB IRONSTONE DR @ 30 AV
							Stops.getALL_STOPS().get("962"), // NB 49 AV @ 34 ST
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(2L, new RouteTripSpec(2L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Johnstone Pk", // Oriole Pk
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("788") // EB JEWELL ST @ TAYLOR DR
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("786"), // WB JEWELL ST @ TAYLOR DR
							Stops.getALL_STOPS().get("646"), // NB KERRY WOOD DR @ FERN RD
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(3L, new RouteTripSpec(3L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Anders Pk", //
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("1096") // EB AVERY ST @ AMER CL
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1097"), // WB AVERY ST @ 30 AV
							Stops.getALL_STOPS().get("734"), // NB 54 AV @ 45 ST
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(4L, new RouteTripSpec(4L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Kentwood", // Glendale
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("795") // WB JORDAN PKY @ TAYLOR DR
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("794"), // EB JORDAN PARKWAY @ STN 5
							Stops.getALL_STOPS().get("1058"), // SB GAETZ AV @ VILLAGE MALL
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(5L, new RouteTripSpec(5L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Collicutt Ctr", // "Deer Pk", // Rosedale South
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // == Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("1003"), // != EB 49 ST @ 48 AV
							Stops.getALL_STOPS().get("1005"), // == NB 47 AV @ 51 ST
							Stops.getALL_STOPS().get("1362"), // == != EB 50 ST @30 AV
							Stops.getALL_STOPS().get("1364"), // != WB ROLAND ST @ ROBERTS CR
							Stops.getALL_STOPS().get("1366"), // != WB ROLAND ST @ ROBERTS CR
							Stops.getALL_STOPS().get("1363"), // != <> NB RUTHERFORD DR @ RUTHERFORD CL
							Stops.getALL_STOPS().get("1365"), // != <> EB ROLAND ST @ ROBERTS CR
							Stops.getALL_STOPS().get("1173"), // == != SB RIDEOUT AV @REICHLEY ST
							Stops.getALL_STOPS().get("1326"), // == SB DAINES @ DUSTON ST
							Stops.getALL_STOPS().get("1211"), // != SB LAWFORD AV @ 32 ST
							Stops.getALL_STOPS().get("1133"), // != NB LOCKWOOD AV @ LANCASTER DR =>
							Stops.getALL_STOPS().get("1155"), // != WB 32 ST @ DAINES AV
							Stops.getALL_STOPS().get("1097") // != WB AVERY ST @ 30 AV =>
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1096"), // EB AVERY ST @ AMER CL
							Stops.getALL_STOPS().get("1174"), // != NB RIDEOUT AV @50 ST
							Stops.getALL_STOPS().get("1365"), // <> EB ROLAND ST @ ROBERTS CR
							Stops.getALL_STOPS().get("1363"), // <> NB RUTHERFORD DR @ RUTHERFORD CL
							Stops.getALL_STOPS().get("1121"), // != WB 50 ST @ 30 AV
							Stops.getALL_STOPS().get("1227"), // WB 55 ST @ 42A AV
							Stops.getALL_STOPS().get("1006"), // SB 47 AV @ 55 ST
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(6L, new RouteTripSpec(6L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Garden Hts", // "Clearview Rdg" + _SLASH_ + "Timberlands", //
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("1378"), // EB TIMBERLANDS DR @ 30 AV
							Stops.getALL_STOPS().get("1355") // NB GARDEN GT @ GREENWAY ST
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1355"), // NB GARDEN GT @ GREENWAY ST
							Stops.getALL_STOPS().get("1243"), // EB TIMOTHY DR @ TOBIN GT
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(7L, new RouteTripSpec(7L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Vanier Woods", // Morrisroe /
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("934") // WB IRONSTONE DR @ 30 AV
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("935"), // EB VANIER DR @ 30 AV
							Stops.getALL_STOPS().get("1133"), // NB LOCKWOOD AV @ LANCASTER DR
							Stops.getALL_STOPS().get("1020"), // WB 35 ST @ 43 AV
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(8L, new RouteTripSpec(8L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Normandeau", // Pines
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("786") // WB JEWELL ST @ TAYLOR DR
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("788"), // EB JEWELL ST @ TAYLOR DR
							Stops.getALL_STOPS().get("1058"), // SB GAETZ AV @ VILLAGE MALL
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(9L, new RouteTripSpec(9L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Inglewood", // Eastview
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("1134"), // SB LOCKWOOD AV @ 32 ST
							Stops.getALL_STOPS().get("912") // WB IRONSIDE ST @ INGLIS CR
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("911"), // EB IRONSIDE ST @ 40 AV
							Stops.getALL_STOPS().get("1046"), // WB 44 ST @ 40 AV
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(10L, new RouteTripSpec(10L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Inglewood", // West Pk / Gaetz Ave South
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("911") // EB IRONSIDE ST @ 40 AV
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("912"), // WB IRONSIDE ST @ INGLIS CR
							Stops.getALL_STOPS().get("897"), // NB 50 AV @ BENNETT
							Stops.getALL_STOPS().get("733"), // EB 43 ST @ TAYLOR DR
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(11L, new RouteTripSpec(11L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Johnstone Pk", // GH Dawe
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("794") // EB JORDAN PARKWAY @ STN 5
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("795"), // WB JORDAN PKY @ TAYLOR DR
							Stops.getALL_STOPS().get("760"), // EB HORN ST @ TAYLOR DR
							Stops.getALL_STOPS().get("1058"), // SB GAETZ AV @ VILLAGE MALL
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(12L, new RouteTripSpec(12L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("877"), // NB LEVA AV @ LAKE ST
							Stops.getALL_STOPS().get("656"), // NB TAYLOR DR @ 19 ST
							Stops.getALL_STOPS().get("900") // WB BENNETT ST @ BAKER AV
					)) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
							Stops.getALL_STOPS().get("891"), // SB 50 AV @ 22 ST
							Stops.getALL_STOPS().get("877") // NB LEVA AV @ LAKE ST
					)) //
			.compileBothTripSort());
	map2.put(12L + ROUTE_ID_ENDS_WITH_A, new RouteTripSpec(12L + ROUTE_ID_ENDS_WITH_A, // 12A
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("623"), // Airport Dr @ Tamarac Bl
							Stops.getALL_STOPS().get("950"), // ++
							Stops.getALL_STOPS().get("904") // WB BENNETT ST @ BARRETT DR
					)) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
							Stops.getALL_STOPS().get("886"), // ++
							Stops.getALL_STOPS().get("878"), // ++ Twp Rd 273a @ Petrolia Dr
							Stops.getALL_STOPS().get("635"), // ++
							Stops.getALL_STOPS().get("623") // Airport Dr @ Tamarac Bl
					)) //
			.compileBothTripSort());
	// NOTE(review): head-sign below joins with a plain space, not _SLASH_ — confirm intentional.
	map2.put(29L, new RouteTripSpec(29L, //
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Notre Dame" + _SLASH_ + "Hunting Hls", //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + " " + "Sorensen Sta") //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // 49 AV @ 48 ST SORENSEN STN
							Stops.getALL_STOPS().get("1135"), // != EB 32 ST @ LOCKWOOD AV
							Stops.getALL_STOPS().get("1134"), // <> SB LOCKWOOD AV @ 32 ST
							Stops.getALL_STOPS().get("1130") // <> WB LEES ST @ LOCKWOOD AV
					)) //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1134"), // <> SB LOCKWOOD AV @ 32 ST
							Stops.getALL_STOPS().get("1130"), // <> WB LEES ST @ LOCKWOOD AV
							Stops.getALL_STOPS().get("1100"), // != NB 30 AV @ COLLICUT CENTRE
							Stops.getALL_STOPS().get("1267") // 49 AV @ 48 ST SORENSEN STN
					)) //
			.compileBothTripSort());
	map2.put(50L, new RouteTripSpec(50L, //
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta", //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Edgar Ind") //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1226"), // SB TAYLOR AV @ EDGAR IND DR
							Stops.getALL_STOPS().get("763"), // ++ SB TAYLOR DR @ 68 ST
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("988"), // NB 49 AV @ 49 ST
							Stops.getALL_STOPS().get("754"), // ++ NB JOHNSTONE DR @ 67 AV
							Stops.getALL_STOPS().get("1226") // SB TAYLOR AV @ EDGAR IND DR
					)) //
			.compileBothTripSort());
	map2.put(51L, new RouteTripSpec(51L, //
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta", //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Ind") // Gaetz Av North
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1083"), // EB 77 ST @ 40 AV
							Stops.getALL_STOPS().get("1082"), // SB RIVERSIDE DR @ 77 ST
							Stops.getALL_STOPS().get("1069"), // ++
							Stops.getALL_STOPS().get("988") // NB 49 AV @ 49 ST
					)) //
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("993"), // ++
							Stops.getALL_STOPS().get("1083") // EB 77 ST @ 40 AV
					)) //
			.compileBothTripSort());
	// Routes 52-54 only run one way (south list intentionally empty).
	map2.put(52L, new RouteTripSpec(52L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Dr", // "Riverside Ind" // Olymel
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, StringUtils.EMPTY) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("701"), // SB 57 AV @ 41 ST
							Stops.getALL_STOPS().get("997") // WB RIVERSIDE DR @ 48 AV
					)) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Collections.emptyList()) //
			.compileBothTripSort());
	map2.put(53L, new RouteTripSpec(53L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Dr", // "Riverside Ind Pk" // Olymel
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, StringUtils.EMPTY) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("798"), // WB HORN ST @ 61 AV
							Stops.getALL_STOPS().get("997") // WB RIVERSIDE DR @ 48 AV
					)) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Collections.emptyList()) //
			.compileBothTripSort());
	map2.put(54L, new RouteTripSpec(54L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Dr", // "Riverside Ind Pk" // Olymel
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, StringUtils.EMPTY) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("911"), // EB IRONSIDE ST @ 40 AV
							Stops.getALL_STOPS().get("1081") // NB RIVERSIDE DR @ 76 ST
					)) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Collections.emptyList()) //
			.compileBothTripSort());
	map2.put(100L, new RouteTripSpec(100L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Lacombe", // Blackfalds Express
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Red Deer") // Sorensen Sta
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("1077"), // NB GAETZ AV @ 78 ST
							Stops.getALL_STOPS().get("1303") // WB COLLEGE AVE @ 52 ST
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1303"), // WB COLLEGE AVE @ 52 ST
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(101L, new RouteTripSpec(101L, //
			MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Lacombe", // Blackfalds Express
			MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Red Deer") // Sorensen Sta
			.addTripSort(MInboundType.OUTBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
							Stops.getALL_STOPS().get("1303") // WB COLLEGE AVE @ 52 ST
					)) //
			.addTripSort(MInboundType.INBOUND.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1303"), // WB COLLEGE AVE @ 52 ST
							Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
					)) //
			.compileBothTripSort());
	map2.put(103L, new RouteTripSpec(103L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1402"), // 42ND ST @ 51AV
							Stops.getALL_STOPS().get("900") // WB BENNETT ST @ BAKER AV
					)) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
							Stops.getALL_STOPS().get("1402") // 42ND ST @ 51AV
					)) //
			.compileBothTripSort());
	map2.put(104L, new RouteTripSpec(104L, //
			MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
			MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
			.addTripSort(MDirectionType.NORTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("1402"), // 42ND ST @ 51AV
							Stops.getALL_STOPS().get("900") // WB BENNETT ST @ BAKER AV
					)) //
			.addTripSort(MDirectionType.SOUTH.intValue(), //
					Arrays.asList(//
							Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
							Stops.getALL_STOPS().get("1402") // 42ND ST @ 51AV
					)) //
			.compileBothTripSort());
	ALL_ROUTE_TRIPS2 = map2;
}
/**
 * Delegates stop ordering to the hard-coded trip spec when one exists for this
 * route; otherwise falls back to the default comparison.
 */
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
	RouteTripSpec rts = ALL_ROUTE_TRIPS2.get(routeId); // never maps to null
	if (rts != null) {
		return rts.compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop, this);
	}
	return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
/**
 * Returns the pre-defined trips for routes covered by ALL_ROUTE_TRIPS2;
 * otherwise uses the default trip splitting.
 */
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
	RouteTripSpec rts = ALL_ROUTE_TRIPS2.get(mRoute.getId()); // never maps to null
	if (rts != null) {
		return rts.getAllTrips();
	}
	return super.splitTrip(mRoute, gTrip, gtfs);
}
/**
 * Assigns a trip stop to trips using the hard-coded spec when available;
 * otherwise defers to the default splitting logic.
 */
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
	RouteTripSpec rts = ALL_ROUTE_TRIPS2.get(mRoute.getId()); // never maps to null
	if (rts != null) {
		return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, rts, this);
	}
	return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
/**
 * Sets the cleaned head-sign on the trip; routes handled by ALL_ROUTE_TRIPS2
 * are skipped because their head-signs come from the hard-coded spec.
 * A missing GTFS direction ID defaults to 0.
 */
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
	if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
		return; // split
	}
	Integer gDirectionId = gTrip.getDirectionId();
	int directionId = gDirectionId == null ? 0 : gDirectionId;
	mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), directionId);
}
// Leading route short name prefix, e.g. "12A - ".
private static final Pattern STARTS_WITH_RSN = Pattern.compile("^[\\d]+[a-zA-Z]? - ", Pattern.CASE_INSENSITIVE);

// Leading "Inbound - " prefix.
private static final Pattern STARTS_WITH_INBOUND_DASH = Pattern.compile("^Inbound - ", Pattern.CASE_INSENSITIVE);

/**
 * Normalizes a GTFS trip head-sign: de-shouts all-uppercase text, strips the
 * leading route-number / "Inbound - " prefixes, then applies the shared
 * number/street-type/label cleanup helpers (pass order preserved).
 */
@Override
public String cleanTripHeadsign(String tripHeadsign) {
	String headSign = tripHeadsign;
	if (Utils.isUppercaseOnly(headSign, true, true)) {
		headSign = headSign.toLowerCase(Locale.ENGLISH);
	}
	headSign = STARTS_WITH_RSN.matcher(headSign).replaceAll(StringUtils.EMPTY);
	headSign = STARTS_WITH_INBOUND_DASH.matcher(headSign).replaceAll(StringUtils.EMPTY);
	headSign = CleanUtils.cleanNumbers(headSign);
	headSign = CleanUtils.cleanStreetTypes(headSign);
	return CleanUtils.cleanLabel(headSign);
}
// Head-sign merging is never expected here: any trip-merge request is treated as
// fatal and logged with both head-signs for investigation.
// NOTE(review): presumably all mergeable routes are covered by ALL_ROUTE_TRIPS2 — confirm.
@Override
public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) {
	MTLog.logFatal("Need to merge trip head-signs: '%s' VS '%s'", mTrip, mTripToMerge);
	return false;
}
// "Industrial" (whole word) -> "Ind", keeping the surrounding separators ($2/$4).
private static final Pattern INDUSTRIAL = Pattern.compile("((^|\\W)(industrial)(\\W|$))", Pattern.CASE_INSENSITIVE);
private static final String INDUSTRIAL_REPLACEMENT = "$2" + "Ind" + "$4";

// Compass-bound prefixes (SB/NB/EB/WB) to strip from stop names.
private static final Pattern BOUNDS = Pattern.compile("((^|\\W)(sb|nb|eb|wb)(\\W|$))", Pattern.CASE_INSENSITIVE);

/**
 * Normalizes a GTFS stop name: lower-cases it, strips compass-bound prefixes,
 * shortens "Industrial", then applies the shared cleanup helpers (pass order preserved).
 */
@Override
public String cleanStopName(String gStopName) {
	String stopName = gStopName.toLowerCase(Locale.ENGLISH);
	stopName = BOUNDS.matcher(stopName).replaceAll(StringUtils.EMPTY);
	stopName = INDUSTRIAL.matcher(stopName).replaceAll(INDUSTRIAL_REPLACEMENT);
	stopName = CleanUtils.removePoints(stopName);
	stopName = CleanUtils.cleanNumbers(stopName);
	stopName = CleanUtils.cleanStreetTypes(stopName);
	return CleanUtils.cleanLabel(stopName);
}
/**
 * Uses the GTFS stop code as the numeric stop ID.
 * Guards against non-numeric stop codes: instead of crashing with an unhandled
 * NumberFormatException, fail through the same MTLog.logFatal path used by the
 * other overrides in this parser (e.g. getRouteId()), which includes context.
 */
@Override
public int getStopId(GStop gStop) {
	String stopCode = gStop.getStopCode();
	if (!Utils.isDigitsOnly(stopCode)) {
		MTLog.logFatal("Unexpected stop code '%s' for %s!", stopCode, gStop);
		return -1;
	}
	return Integer.parseInt(stopCode); // use stop code as stop ID
}
}
| src/main/java/org/mtransit/parser/ca_red_deer_transit_bus/RedDeerTransitBusAgencyTools.java | package org.mtransit.parser.ca_red_deer_transit_bus;
import org.apache.commons.lang3.StringUtils;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.MTLog;
import org.mtransit.parser.Pair;
import org.mtransit.parser.SplitUtils;
import org.mtransit.parser.SplitUtils.RouteTripSpec;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.gtfs.data.GTripStop;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MDirectionType;
import org.mtransit.parser.mt.data.MInboundType;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;
import org.mtransit.parser.mt.data.MTripStop;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
// https://data.reddeer.ca/gtfsdatafeed
// https://data.reddeer.ca/data/GTFS/RD_GTFS.zip
// OTHER: https://webmap.reddeer.ca/transit/google_transit.zip
public class RedDeerTransitBusAgencyTools extends DefaultAgencyTools {
/**
 * Entry point: runs the parser with default input/output paths when no
 * command-line arguments are supplied.
 */
public static void main(String[] args) {
	if (args == null || args.length == 0) {
		args = new String[]{
				"input/gtfs.zip", // GTFS source
				"../../mtransitapps/ca-red-deer-transit-bus-android/res/raw/", // output dir
				"" // files-prefix
		};
	}
	new RedDeerTransitBusAgencyTools().start(args);
}
// Service IDs kept after filtering; null means "no filtering requested".
private HashSet<String> serviceIds;

/**
 * Runs the generation, extracting the useful service IDs first and logging
 * the total duration.
 */
@Override
public void start(String[] args) {
	MTLog.log("Generating Red Deer Transit bus data...");
	final long startTimeMs = System.currentTimeMillis();
	this.serviceIds = extractUsefulServiceIds(args, this, true);
	super.start(args);
	MTLog.log("Generating Red Deer Transit bus data... DONE in %s.", Utils.getPrettyDuration(System.currentTimeMillis() - startTimeMs));
}
/**
 * True when service-ID filtering removed everything (nothing left to generate).
 */
@Override
public boolean excludingAll() {
	if (this.serviceIds == null) {
		return false; // no filtering in effect
	}
	return this.serviceIds.isEmpty();
}
/**
 * Excludes calendars outside the useful service IDs when filtering is active.
 */
@Override
public boolean excludeCalendar(GCalendar gCalendar) {
	if (this.serviceIds == null) {
		return super.excludeCalendar(gCalendar);
	}
	return excludeUselessCalendar(gCalendar, this.serviceIds);
}
/**
 * Excludes calendar dates outside the useful service IDs when filtering is active.
 */
@Override
public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
	if (this.serviceIds == null) {
		return super.excludeCalendarDate(gCalendarDates);
	}
	return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
}
/**
 * Excludes trips outside the useful service IDs when filtering is active.
 */
@Override
public boolean excludeTrip(GTrip gTrip) {
	if (this.serviceIds == null) {
		return super.excludeTrip(gTrip);
	}
	return excludeUselessTrip(gTrip, this.serviceIds);
}
// No route-level filtering beyond the default behavior; kept as an explicit
// extension point for future route exclusions.
@Override
public boolean excludeRoute(GRoute gRoute) {
	return super.excludeRoute(gRoute);
}
// Every route of this agency is a bus route.
@Override
public Integer getAgencyRouteType() {
	return MAgency.ROUTE_TYPE_BUS;
}
private static final Pattern DIGITS = Pattern.compile("[\\d]+");

private static final String A = "A";

// Offset added to the numeric part of short names ending in "A" (e.g. "12A" -> 10_012).
private static final long ROUTE_ID_ENDS_WITH_A = 10_000L;

/**
 * Derives a numeric route ID from the GTFS route short name.
 * Purely numeric short names map directly; names like "12A" map to
 * ROUTE_ID_ENDS_WITH_A + 12. Anything else is fatal.
 */
@Override
public long getRouteId(GRoute gRoute) {
	String rsn = gRoute.getRouteShortName();
	if (Utils.isDigitsOnly(rsn)) {
		return Long.parseLong(rsn); // use route short name as route ID
	}
	Matcher matcher = DIGITS.matcher(rsn);
	if (matcher.find() && rsn.endsWith(A)) {
		return ROUTE_ID_ENDS_WITH_A + Long.parseLong(matcher.group());
	}
	MTLog.logFatal("Unexpected route ID for %s!", gRoute);
	return -1L;
}
// Shared color for all yellow school-bus routes (20-41 and 35A).
private static final String YELLOW_SCHOOL_BUS_COLOR = "FFD800";

/**
 * Resolves the route color: uses the GTFS color when present (white is treated
 * as absent), otherwise falls back to a hard-coded per-route mapping.
 * Unknown routes are fatal.
 */
@SuppressWarnings("DuplicateBranchesInSwitch")
@Override
public String getRouteColor(GRoute gRoute) {
	String routeColor = gRoute.getRouteColor();
	if (WHITE.equalsIgnoreCase(routeColor)) {
		routeColor = null; // ignore useless white
	}
	if (!StringUtils.isEmpty(routeColor)) {
		return routeColor; // GTFS-provided color wins
	}
	if (Utils.isDigitsOnly(gRoute.getRouteShortName())) {
		int rsn = Integer.parseInt(gRoute.getRouteShortName());
		switch (rsn) {
		// @formatter:off
		case 1: return "B60000";
		case 2: return "32BEBB";
		case 3: return "B48ACA";
		case 4: return "FE0002";
		case 5: return "793E95";
		case 6: return "000000";
		case 7: return "0000B6";
		case 8: return "2DA9AA";
		case 9: return "8E8E8E";
		case 10: return "E95393";
		case 11: return "FFB61A";
		case 12: return "217E7D";
		case 20: case 21: case 22: case 23: case 24: case 25: case 26: case 27:
		case 28: case 29: case 30: case 31: case 32: case 33: case 34: case 35:
		case 36: case 37: case 38: case 39: case 40: case 41:
			return YELLOW_SCHOOL_BUS_COLOR; // school routes
		case 50: return "000000";
		case 51: return "5DCDF3";
		case 52: return "01B601";
		case 53: return "FE0000";
		case 54: return "6A3683";
		case 100: return "016E01";
		case 101: return "0000B6";
		case 103: return null; // TODO?
		case 104: return null; // TODO?
		// @formatter:on
		}
	}
	if ("12A".equalsIgnoreCase(gRoute.getRouteShortName())) {
		return "AE7B10";
	}
	if ("35A".equalsIgnoreCase(gRoute.getRouteShortName())) {
		return YELLOW_SCHOOL_BUS_COLOR;
	}
	MTLog.logFatal("Unexpected route color '%s'", gRoute);
	return null;
}
// Matches a "route ##" / "route ##A" token to strip from GTFS long names.
private static final Pattern ROUTE_RSN = Pattern.compile("(route [\\d]+[a-zA-Z]?)", Pattern.CASE_INSENSITIVE);

// Separator used when composing multi-area route long names.
private static final String _SLASH_ = " / ";

/**
 * Resolves the route long name: cleans the GTFS-provided value, and when it is
 * empty falls back to a hard-coded per-route mapping (fatal for unknown routes).
 */
@SuppressWarnings("DuplicateBranchesInSwitch")
@Override
public String getRouteLongName(GRoute gRoute) {
	String routeLongName = gRoute.getRouteLongName();
	routeLongName = cleanRouteLongName(routeLongName);
	if (StringUtils.isEmpty(routeLongName)) {
		if (Utils.isDigitsOnly(gRoute.getRouteShortName())) {
			int rsn = Integer.parseInt(gRoute.getRouteShortName());
			switch (rsn) {
			// @formatter:off
			case 1: return "South Hl" + _SLASH_ + "Inglewood";
			case 2: return "Oriole Pk" + _SLASH_ + "Johnstone Pk South";
			case 3: return "College" + _SLASH_ + "Anders Pk";
			case 4: return "Glendale" + _SLASH_ + "Kentwood (West)";
			case 5: return "Rosedale South" + _SLASH_ + "Deer Pk";
			case 6: return "Clearview Rdg" + _SLASH_ + "Timberlands";
			case 7: return "Morrisroe" + _SLASH_ + "Vanier Woods";
			case 8: return "Pines" + _SLASH_ + "Normandeau";
			case 9: return "Eastview" + _SLASH_ + "Inglewood";
			case 10: return "West Pk" + _SLASH_ + "Gaetz Ave South";
			case 11: return "Johnstone Pk (North)" + _SLASH_ + "GH Dawe";
			case 12: return "Gasoline Alley";
			case 20: return "Lindsay Thurber" + _SLASH_ + "Oriole Pk";
			case 21: return "Lindsay Thurber" + _SLASH_ + "Normandeau" + _SLASH_ + "Glendale";
			case 22: return "Lindsay Thurber" + _SLASH_ + "Normandeau";
			// NOTE(review): trailing space in "Eastview Ests " below — presumably unintentional, confirm.
			case 23: return "Lindsay Thurber" + _SLASH_ + "Eastview Ests ";
			case 24: return "Lindsay Thurber" + _SLASH_ + "East Hl";
			case 25: return "Lindsay Thurber" + _SLASH_ + "Johnstone Pk";
			case 26: return "Hunting Hls" + _SLASH_ + "West Pk";
			case 27: return "Hunting Hls" + _SLASH_ + "Eastview Ests";
			case 28: return "Eastview Middle School" + _SLASH_ + "Eastview Ests";
			case 29: return "Notre Dame" + _SLASH_ + "Hunting Hls" + _SLASH_ + "City Ctr Sorensen Sta";
			case 30: return "City Ctr" + _SLASH_ + "Sorensen Sta";
			case 31: return "Saint Joseph School" + _SLASH_ + "City Ctr" + _SLASH_ + "Sorensen Sta";
			case 32: return "Central Middle School" + _SLASH_ + "Normandeau" + _SLASH_ + "Timberlands";
			case 33: return "Lindsay Thurber" + _SLASH_ + "City Ctr" + _SLASH_ + "Sorensen Sta";
			case 34: return "Saint Joseph School" + _SLASH_ + "Normandeau" + _SLASH_ + "Highland Grn";
			case 35: return "Central Middle School" + _SLASH_ + "Fairview" + _SLASH_ + "Riverside Mdws";
			case 36: return "City Ctr" + _SLASH_ + "Lazy Bus";
			case 37: return "Saint Joseph School" + _SLASH_ + "Timberlands" + _SLASH_ + "Rosedale";
			case 38: return "Clearview" + _SLASH_ + "Deer" + _SLASH_ + "Timberlands" + _SLASH_ + "Rosedale";
			case 39: return "Hunting Hls" + _SLASH_ + "Eastview School" + _SLASH_ + "Morrisroe" + _SLASH_ + "Lancaster";
			case 40: return "Saint Joseph School" + _SLASH_ + "Johnstone Pk" + _SLASH_ + "Kentwood";
			case 41: return "Saint Joseph School" + _SLASH_ + "Kentwood" + _SLASH_ + "Glendale";
			case 50: return "Edgar Ind Pk";
			case 51: return "Gaetz Ave North" + _SLASH_ + "Riverside Ind";
			case 52: return "Riverside Ind Pk" + _SLASH_ + "Olymel";
			case 53: return "Riverside Ind Pk" + _SLASH_ + "Olymel";
			case 54: return "Riverside Ind Pk" + _SLASH_ + "Olymel";
			case 100: return "Lacombe Blackfalds Express";
			case 101: return "Lacombe Blackfalds Local";
			case 103: return "Springbrook, Penhold, Innisfail, Bower Mall";
			case 104: return "Springbrook, Penhold, Innisfail, Bower Mall";
			// @formatter:on
			}
		}
		if ("12A".equalsIgnoreCase(gRoute.getRouteShortName())) {
			return "Gasoline Alley" + _SLASH_ + "Springbrook";
		}
		if ("35A".equalsIgnoreCase(gRoute.getRouteShortName())) {
			return "Central Middle School" + _SLASH_ + "Oriole Pk";
		}
		MTLog.logFatal("Unexpected route long name '%s'", gRoute);
		return null;
	}
	return routeLongName;
}
/**
 * Normalizes a raw GTFS route long name: strips the "route ##" prefix,
 * shortens "Industrial" to "Ind", then applies the shared punctuation and
 * street-type cleanup helpers. Pass order matters and is preserved.
 */
private String cleanRouteLongName(String routeLongName) {
	String cleaned = ROUTE_RSN.matcher(routeLongName).replaceAll(StringUtils.EMPTY);
	cleaned = INDUSTRIAL.matcher(cleaned).replaceAll(INDUSTRIAL_REPLACEMENT);
	cleaned = CleanUtils.removePoints(cleaned);
	return CleanUtils.cleanStreetTypes(cleaned);
}
private static final String AGENCY_COLOR_RED = "BF311A"; // RED (from web site CSS)

// Agency-wide default color, used when a route does not define its own.
private static final String AGENCY_COLOR = AGENCY_COLOR_RED;

@Override
public String getAgencyColor() {
	return AGENCY_COLOR;
}
/**
 * Hand-written trip splits for routes whose GTFS trips cannot be split automatically:
 * for each route ID, the inbound/outbound (or north/south) head-signs plus the ordered
 * stop lists used to sort trip stops. Stop comments give the stop code and location.
 */
private static HashMap<Long, RouteTripSpec> ALL_ROUTE_TRIPS2;
// Built once at class-load time; the map is not modified afterwards.
static {
HashMap<Long, RouteTripSpec> map2 = new HashMap<>();
map2.put(1L, new RouteTripSpec(1L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Inglewood", // South Hl
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("935") // EB VANIER DR @ 30 AV
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("934"), // WB IRONSTONE DR @ 30 AV
Stops.getALL_STOPS().get("962"), // NB 49 AV @ 34 ST
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(2L, new RouteTripSpec(2L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Johnstone Pk", // Oriole Pk
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("788") // EB JEWELL ST @ TAYLOR DR
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("786"), // WB JEWELL ST @ TAYLOR DR
Stops.getALL_STOPS().get("646"), // NB KERRY WOOD DR @ FERN RD
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(3L, new RouteTripSpec(3L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Anders Pk", //
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("1096") // EB AVERY ST @ AMER CL
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1097"), // WB AVERY ST @ 30 AV
Stops.getALL_STOPS().get("734"), // NB 54 AV @ 45 ST
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(4L, new RouteTripSpec(4L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Kentwood", // Glendale
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("795") // WB JORDAN PKY @ TAYLOR DR
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("794"), // EB JORDAN PARKWAY @ STN 5
Stops.getALL_STOPS().get("1058"), // SB GAETZ AV @ VILLAGE MALL
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(5L, new RouteTripSpec(5L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Collicutt Ctr", // "Deer Pk", // Rosedale South
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // == Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("1003"), // != EB 49 ST @ 48 AV
Stops.getALL_STOPS().get("1005"), // == NB 47 AV @ 51 ST
Stops.getALL_STOPS().get("1362"), // == != EB 50 ST @30 AV
Stops.getALL_STOPS().get("1364"), // != WB ROLAND ST @ ROBERTS CR
Stops.getALL_STOPS().get("1366"), // != WB ROLAND ST @ ROBERTS CR
Stops.getALL_STOPS().get("1363"), // != <> NB RUTHERFORD DR @ RUTHERFORD CL
Stops.getALL_STOPS().get("1365"), // != <> EB ROLAND ST @ ROBERTS CR
Stops.getALL_STOPS().get("1173"), // == != SB RIDEOUT AV @REICHLEY ST
Stops.getALL_STOPS().get("1326"), // == SB DAINES @ DUSTON ST
Stops.getALL_STOPS().get("1211"), // != SB LAWFORD AV @ 32 ST
Stops.getALL_STOPS().get("1133"), // != NB LOCKWOOD AV @ LANCASTER DR =>
Stops.getALL_STOPS().get("1155"), // != WB 32 ST @ DAINES AV
Stops.getALL_STOPS().get("1097") // != WB AVERY ST @ 30 AV =>
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1096"), // EB AVERY ST @ AMER CL
Stops.getALL_STOPS().get("1174"), // != NB RIDEOUT AV @50 ST
Stops.getALL_STOPS().get("1365"), // <> EB ROLAND ST @ ROBERTS CR
Stops.getALL_STOPS().get("1363"), // <> NB RUTHERFORD DR @ RUTHERFORD CL
Stops.getALL_STOPS().get("1121"), // != WB 50 ST @ 30 AV
Stops.getALL_STOPS().get("1227"), // WB 55 ST @ 42A AV
Stops.getALL_STOPS().get("1006"), // SB 47 AV @ 55 ST
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(6L, new RouteTripSpec(6L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Garden Hts", // "Clearview Rdg" + _SLASH_ + "Timberlands", //
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("1378"), // EB TIMBERLANDS DR @ 30 AV
Stops.getALL_STOPS().get("1355") // NB GARDEN GT @ GREENWAY ST
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1355"), // NB GARDEN GT @ GREENWAY ST
Stops.getALL_STOPS().get("1243"), // EB TIMOTHY DR @ TOBIN GT
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(7L, new RouteTripSpec(7L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Vanier Woods", // Morrisroe /
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("934") // WB IRONSTONE DR @ 30 AV
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("935"), // EB VANIER DR @ 30 AV
Stops.getALL_STOPS().get("1133"), // NB LOCKWOOD AV @ LANCASTER DR
Stops.getALL_STOPS().get("1020"), // WB 35 ST @ 43 AV
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(8L, new RouteTripSpec(8L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Normandeau", // Pines
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("786") // WB JEWELL ST @ TAYLOR DR
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("788"), // EB JEWELL ST @ TAYLOR DR
Stops.getALL_STOPS().get("1058"), // SB GAETZ AV @ VILLAGE MALL
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(9L, new RouteTripSpec(9L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Inglewood", // Eastview
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("1134"), // SB LOCKWOOD AV @ 32 ST
Stops.getALL_STOPS().get("912") // WB IRONSIDE ST @ INGLIS CR
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("911"), // EB IRONSIDE ST @ 40 AV
Stops.getALL_STOPS().get("1046"), // WB 44 ST @ 40 AV
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(10L, new RouteTripSpec(10L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Inglewood", // West Pk / Gaetz Ave South
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("911") // EB IRONSIDE ST @ 40 AV
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("912"), // WB IRONSIDE ST @ INGLIS CR
Stops.getALL_STOPS().get("897"), // NB 50 AV @ BENNETT
Stops.getALL_STOPS().get("733"), // EB 43 ST @ TAYLOR DR
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(11L, new RouteTripSpec(11L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Johnstone Pk", // GH Dawe
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta") //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("794") // EB JORDAN PARKWAY @ STN 5
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("795"), // WB JORDAN PKY @ TAYLOR DR
Stops.getALL_STOPS().get("760"), // EB HORN ST @ TAYLOR DR
Stops.getALL_STOPS().get("1058"), // SB GAETZ AV @ VILLAGE MALL
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(12L, new RouteTripSpec(12L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("877"), // NB LEVA AV @ LAKE ST
Stops.getALL_STOPS().get("656"), // NB TAYLOR DR @ 19 ST
Stops.getALL_STOPS().get("900") // WB BENNETT ST @ BAKER AV
)) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
Stops.getALL_STOPS().get("891"), // SB 50 AV @ 22 ST
Stops.getALL_STOPS().get("877") // NB LEVA AV @ LAKE ST
)) //
.compileBothTripSort());
// route "12A" is stored under 12 + ROUTE_ID_ENDS_WITH_A
map2.put(12L + ROUTE_ID_ENDS_WITH_A, new RouteTripSpec(12L + ROUTE_ID_ENDS_WITH_A, // 12A
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("623"), // Airport Dr @ Tamarac Bl
Stops.getALL_STOPS().get("950"), // ++
Stops.getALL_STOPS().get("904") // WB BENNETT ST @ BARRETT DR
)) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
Stops.getALL_STOPS().get("886"), // ++
Stops.getALL_STOPS().get("878"), // ++ Twp Rd 273a @ Petrolia Dr
Stops.getALL_STOPS().get("635"), // ++
Stops.getALL_STOPS().get("623") // Airport Dr @ Tamarac Bl
)) //
.compileBothTripSort());
map2.put(29L, new RouteTripSpec(29L, //
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Notre Dame" + _SLASH_ + "Hunting Hls", //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + " " + "Sorensen Sta") //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // 49 AV @ 48 ST SORENSEN STN
Stops.getALL_STOPS().get("1135"), // != EB 32 ST @ LOCKWOOD AV
Stops.getALL_STOPS().get("1134"), // <> SB LOCKWOOD AV @ 32 ST
Stops.getALL_STOPS().get("1130") // <> WB LEES ST @ LOCKWOOD AV
)) //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1134"), // <> SB LOCKWOOD AV @ 32 ST
Stops.getALL_STOPS().get("1130"), // <> WB LEES ST @ LOCKWOOD AV
Stops.getALL_STOPS().get("1100"), // != NB 30 AV @ COLLICUT CENTRE
Stops.getALL_STOPS().get("1267") // 49 AV @ 48 ST SORENSEN STN
)) //
.compileBothTripSort());
map2.put(50L, new RouteTripSpec(50L, //
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta", //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Edgar Ind") //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1226"), // SB TAYLOR AV @ EDGAR IND DR
Stops.getALL_STOPS().get("763"), // ++ SB TAYLOR DR @ 68 ST
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("988"), // NB 49 AV @ 49 ST
Stops.getALL_STOPS().get("754"), // ++ NB JOHNSTONE DR @ 67 AV
Stops.getALL_STOPS().get("1226") // SB TAYLOR AV @ EDGAR IND DR
)) //
.compileBothTripSort());
map2.put(51L, new RouteTripSpec(51L, //
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "City Ctr" + _SLASH_ + "Sorensen Sta", //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Ind") // Gaetz Av North
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1083"), // EB 77 ST @ 40 AV
Stops.getALL_STOPS().get("1082"), // SB RIVERSIDE DR @ 77 ST
Stops.getALL_STOPS().get("1069"), // ++
Stops.getALL_STOPS().get("988") // NB 49 AV @ 49 ST
)) //
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("993"), // ++
Stops.getALL_STOPS().get("1083") // EB 77 ST @ 40 AV
)) //
.compileBothTripSort());
// routes 52-54 are one-way (north only); the south direction is an empty stop list
map2.put(52L, new RouteTripSpec(52L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Dr", // "Riverside Ind" // Olymel
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, StringUtils.EMPTY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("701"), // SB 57 AV @ 41 ST
Stops.getALL_STOPS().get("997") // WB RIVERSIDE DR @ 48 AV
)) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Collections.emptyList()) //
.compileBothTripSort());
map2.put(53L, new RouteTripSpec(53L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Dr", // "Riverside Ind Pk" // Olymel
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, StringUtils.EMPTY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("798"), // WB HORN ST @ 61 AV
Stops.getALL_STOPS().get("997") // WB RIVERSIDE DR @ 48 AV
)) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Collections.emptyList()) //
.compileBothTripSort());
map2.put(54L, new RouteTripSpec(54L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Riverside Dr", // "Riverside Ind Pk" // Olymel
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_STRING, StringUtils.EMPTY) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("911"), // EB IRONSIDE ST @ 40 AV
Stops.getALL_STOPS().get("1081") // NB RIVERSIDE DR @ 76 ST
)) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Collections.emptyList()) //
.compileBothTripSort());
// regional commuter routes (100s)
map2.put(100L, new RouteTripSpec(100L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Lacombe", // Blackfalds Express
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Red Deer") // Sorensen Sta
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("1077"), // NB GAETZ AV @ 78 ST
Stops.getALL_STOPS().get("1303") // WB COLLEGE AVE @ 52 ST
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1303"), // WB COLLEGE AVE @ 52 ST
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(101L, new RouteTripSpec(101L, //
MInboundType.OUTBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Lacombe", // Blackfalds Express
MInboundType.INBOUND.intValue(), MTrip.HEADSIGN_TYPE_STRING, "Red Deer") // Sorensen Sta
.addTripSort(MInboundType.OUTBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1267"), // Sorensen Station 49 AV @ 48 ST
Stops.getALL_STOPS().get("1303") // WB COLLEGE AVE @ 52 ST
)) //
.addTripSort(MInboundType.INBOUND.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1303"), // WB COLLEGE AVE @ 52 ST
Stops.getALL_STOPS().get("1267") // Sorensen Station 49 AV @ 48 ST
)) //
.compileBothTripSort());
map2.put(103L, new RouteTripSpec(103L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1402"), // 42ND ST @ 51AV
Stops.getALL_STOPS().get("900") // WB BENNETT ST @ BAKER AV
)) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
Stops.getALL_STOPS().get("1402") // 42ND ST @ 51AV
)) //
.compileBothTripSort());
map2.put(104L, new RouteTripSpec(104L, //
MDirectionType.NORTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.NORTH.getId(), //
MDirectionType.SOUTH.intValue(), MTrip.HEADSIGN_TYPE_DIRECTION, MDirectionType.SOUTH.getId()) //
.addTripSort(MDirectionType.NORTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("1402"), // 42ND ST @ 51AV
Stops.getALL_STOPS().get("900") // WB BENNETT ST @ BAKER AV
)) //
.addTripSort(MDirectionType.SOUTH.intValue(), //
Arrays.asList(//
Stops.getALL_STOPS().get("900"), // WB BENNETT ST @ BAKER AV
Stops.getALL_STOPS().get("1402") // 42ND ST @ 51AV
)) //
.compileBothTripSort());
ALL_ROUTE_TRIPS2 = map2;
}
/**
 * Delegates stop ordering to the hand-written trip spec when this route has one,
 * otherwise falls back to the default comparison.
 */
@Override
public int compareEarly(long routeId, List<MTripStop> list1, List<MTripStop> list2, MTripStop ts1, MTripStop ts2, GStop ts1GStop, GStop ts2GStop) {
	RouteTripSpec spec = ALL_ROUTE_TRIPS2.get(routeId); // map never holds null values
	if (spec != null) {
		return spec.compare(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop, this);
	}
	return super.compareEarly(routeId, list1, list2, ts1, ts2, ts1GStop, ts2GStop);
}
/** Returns the pre-defined trips for manually split routes; default splitting otherwise. */
@Override
public ArrayList<MTrip> splitTrip(MRoute mRoute, GTrip gTrip, GSpec gtfs) {
	RouteTripSpec spec = ALL_ROUTE_TRIPS2.get(mRoute.getId()); // map never holds null values
	if (spec != null) {
		return spec.getAllTrips();
	}
	return super.splitTrip(mRoute, gTrip, gtfs);
}
/** Routes with a hand-written spec get their trip stops split by SplitUtils. */
@Override
public Pair<Long[], Integer[]> splitTripStop(MRoute mRoute, GTrip gTrip, GTripStop gTripStop, ArrayList<MTrip> splitTrips, GSpec routeGTFS) {
	RouteTripSpec spec = ALL_ROUTE_TRIPS2.get(mRoute.getId()); // map never holds null values
	if (spec != null) {
		return SplitUtils.splitTripStop(mRoute, gTrip, gTripStop, routeGTFS, spec, this);
	}
	return super.splitTripStop(mRoute, gTrip, gTripStop, splitTrips, routeGTFS);
}
/**
 * Sets the cleaned GTFS head-sign on the trip; routes that are split via
 * ALL_ROUTE_TRIPS2 are skipped because their head-signs are hard-coded there.
 */
@Override
public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
	if (ALL_ROUTE_TRIPS2.containsKey(mRoute.getId())) {
		return; // head-signs come from the route trip spec
	}
	final Integer gDirectionId = gTrip.getDirectionId();
	final int directionId;
	if (gDirectionId == null) {
		directionId = 0; // GTFS direction_id is optional; default to 0
	} else {
		directionId = gDirectionId;
	}
	mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), directionId);
}
// "12A - ..." style route-number prefix at the start of a head-sign.
private static final Pattern STARTS_WITH_RSN = Pattern.compile("^[\\d]+[a-zA-Z]? - ", Pattern.CASE_INSENSITIVE);
// "Inbound - ..." prefix at the start of a head-sign.
private static final Pattern STARTS_WITH_INBOUND_DASH = Pattern.compile("^Inbound - ", Pattern.CASE_INSENSITIVE);
/**
 * Cleans a GTFS trip head-sign: fixes ALL-CAPS casing, strips route-number and
 * "Inbound -" prefixes, then applies the shared number / street-type / label cleanup.
 */
@Override
public String cleanTripHeadsign(String tripHeadsign) {
	String headSign = tripHeadsign;
	if (Utils.isUppercaseOnly(headSign, true, true)) {
		headSign = headSign.toLowerCase(Locale.ENGLISH); // normalize shouting case
	}
	headSign = STARTS_WITH_RSN.matcher(headSign).replaceAll(StringUtils.EMPTY);
	headSign = STARTS_WITH_INBOUND_DASH.matcher(headSign).replaceAll(StringUtils.EMPTY);
	headSign = CleanUtils.cleanNumbers(headSign);
	headSign = CleanUtils.cleanStreetTypes(headSign);
	return CleanUtils.cleanLabel(headSign);
}
/**
 * Head-sign merging is not expected for this agency: ambiguous routes are handled
 * by ALL_ROUTE_TRIPS2, so reaching this method indicates missing configuration.
 * NOTE(review): logFatal presumably aborts the parse, so the return value is
 * never observed — confirm against MTLog.
 */
@Override
public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) {
MTLog.logFatal("Need to merge trip head-signs: '%s' VS '%s'", mTrip, mTripToMerge);
return false;
}
// "industrial" as a whole word; $2/$4 keep the surrounding non-word characters.
private static final Pattern INDUSTRIAL = Pattern.compile("((^|\\W)(industrial)(\\W|$))", Pattern.CASE_INSENSITIVE);
private static final String INDUSTRIAL_REPLACEMENT = "$2" + "Ind" + "$4";
// Compass-bound tokens (SB/NB/EB/WB) that should be dropped from stop names.
private static final Pattern BOUNDS = Pattern.compile("((^|\\W)(sb|nb|eb|wb)(\\W|$))", Pattern.CASE_INSENSITIVE);
/**
 * Cleans a GTFS stop name: lower-cases it, drops compass-bound tokens, abbreviates
 * "industrial", then applies the shared punctuation / number / street-type cleanup.
 */
@Override
public String cleanStopName(String gStopName) {
	String stopName = gStopName.toLowerCase(Locale.ENGLISH);
	stopName = BOUNDS.matcher(stopName).replaceAll(StringUtils.EMPTY);
	stopName = INDUSTRIAL.matcher(stopName).replaceAll(INDUSTRIAL_REPLACEMENT);
	stopName = CleanUtils.removePoints(stopName);
	stopName = CleanUtils.cleanNumbers(stopName);
	return CleanUtils.cleanLabel(CleanUtils.cleanStreetTypes(stopName));
}
/**
 * Uses the GTFS stop code (not the GTFS stop id) as the numeric stop ID.
 * NOTE(review): throws NumberFormatException if a stop code is empty or
 * non-numeric — presumably every stop code in this feed is numeric; verify.
 */
@Override
public int getStopId(GStop gStop) {
return Integer.parseInt(gStop.getStopCode()); // use stop code as stop ID
}
}
| Update git submodule
| src/main/java/org/mtransit/parser/ca_red_deer_transit_bus/RedDeerTransitBusAgencyTools.java | Update git submodule |
|
Java | apache-2.0 | 1ae042a0742397072ed243a8256bab57b34e2758 | 0 | dhmay/msInspect,dhmay/msInspect,dhmay/msInspect | /*
* Copyright (c) 2003-2007 Fred Hutchinson Cancer Research Center
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fhcrc.cpl.toolbox;
import org.apache.log4j.Logger;
import org.fhcrc.cpl.toolbox.ApplicationContext;
import java.io.*;
import java.util.*;
/**
* A class with generic methods for interfacing with R. Hides all kinds of exceptions, both
* from R and from file IO
*/
public class RInterface
{
// Class-wide logger.
private static Logger _log = Logger.getLogger(RInterface.class);
// Counter for generated R expressions. NOTE(review): not referenced in the code
// visible here — confirm it is still used elsewhere in this class.
protected static int expressionNumber = 0;
//maximum number of milliseconds to wait for a response from R before giving up
protected static final int DEFAULT_MAX_R_WAIT_MILLIS = 5000;
//number of milliseconds to sleep between checks for response from R
protected static final int R_SLEEP_INCREMENT_MILLIS = 50;
/**
 * Chooses the R launcher command name for the current operating system:
 * Windows installs expose "RCMD"; everything else uses "R CMD".
 *
 * @return the OS-appropriate R command prefix (without the "BATCH ..." arguments)
 */
protected static String getRCommandStringForOS()
{
    // System.getProperty("os.name") can return null in unusual environments;
    // previously that caused a NullPointerException. Treat null as non-Windows.
    String os = System.getProperty("os.name");
    if (os != null && os.contains("Windows"))
        return "RCMD";
    return "R CMD";
}
/**
 * Runs an R script in batch mode, with R's working directory set to the temp dir.
 * On success, the .Rout file generated by R is marked for deletion when the
 * caller's temp files are cleaned up (it is not deleted here so a user can still
 * inspect it).
 *
 * This is the easy and stable way to invoke R. The alternative is to interact with
 * the input, output, and error streams of the process, which provides some advantages.
 *
 * @param rScriptFile the R script to execute
 * @param caller token used by TempFileManager to group temp files for cleanup
 * @return true if R exited with status 0 (never returns false; failures throw)
 * @throws RuntimeException if R cannot be started or exits with a nonzero status
 */
public static boolean runRScript(File rScriptFile, Object caller)
        throws RuntimeException
{
    String rScriptFilepath = rScriptFile.getAbsolutePath();
    try
    {
        String cmd = getRCommandStringForOS() + " BATCH --slave " + rScriptFilepath;
        _log.debug("Before running R, script file " + rScriptFilepath);
        Process p = Runtime.getRuntime().exec(cmd, null, TempFileManager.getTmpDir());
        _log.debug("after running R");
        int err = p.waitFor();
        _log.debug("process returned, " + err);
        if (err != 0)
        {
            // keep the script file around for debugging. Throw the real failure here;
            // previously this exception was caught below and re-wrapped with the
            // misleading "R is not on your PATH" message.
            TempFileManager.unmarkFileForDeletion(rScriptFile, caller);
            throw new RuntimeException("Error in executing R, temp dir is " +
                    TempFileManager.getTmpDir() + ". R process status: " + err, null);
        }
        //Only try to delete the out file if we successfully ran the script
        TempFileManager.markFileForDeletion(new File(rScriptFile.getAbsolutePath() + "out"), caller);
        return true;
    }
    catch (InterruptedException e)
    {
        Thread.currentThread().interrupt(); // preserve the interrupt status
        throw new RuntimeException("Interrupted while waiting for R to complete", e);
    }
    catch (IOException e)
    {
        // exec() failed: most likely R is not installed or not on the PATH
        throw new RuntimeException("Failed to run R code. Details follow.\n" +
                "Please make sure that R is a) installed and b) on your PATH. To do this,\n" +
                "open a command prompt window and type R (on Linux) or RCMD (on Windows)\n" +
                "If the R command is not found, your PATH environment variable needs to be\n" +
                "modified to contain the R binary directory.", e);
    }
}
/**
 * Parses the text R prints for a matrix into a float[row][column] array.
 * Expects the first line to hold column headers and each following line to start
 * with a "[n,]"-style row label, as in R's default matrix output.
 *
 * @param rResponse raw matrix output from R
 * @return parsed values; first index is row, second is column
 */
public static float[][] processRMatrixResponse(String rResponse)
{
    String[] textLines = rResponse.split("\n");
    int numRows = textLines.length - 1;
    // the header line determines the column count
    String headerLine = stripLeadingWhitespace(textLines[0]);
    int numCols = headerLine.split("\\s+").length;
    _log.debug("processRMatrixResponse, first line: " + headerLine + ", numCols=" + numCols);
    float[][] result = new float[numRows][numCols];
    for (int row = 0; row < numRows; row++)
    {
        // drop the "[n,]" row label, then any padding before the first value
        String line = textLines[row + 1];
        line = stripLeadingWhitespace(line.substring(line.indexOf("]") + 1));
        String[] entries = line.split("\\s+");
        for (int col = 0; col < numCols; col++)
        {
            try
            {
                result[row][col] = Float.parseFloat(entries[col]);
            }
            catch (RuntimeException e)
            {
                ApplicationContext.infoMessage("FAILED while processing line " + row + ": **" + line + "**, specifically the value **" + entries[col] + "**, entry " + col);
                throw e;
            }
        }
    }
    return result;
}

/** Returns the argument with its leading whitespace characters removed. */
private static String stripLeadingWhitespace(String s)
{
    int start = 0;
    while (Character.isWhitespace(s.charAt(start)))
        start++;
    return s.substring(start);
}
/**
 * Parses the output of R's coefficients() into a double array, ignoring the
 * header line(s) and splitting the value line(s) around whitespace.
 * Brittle: assumes well-formed R output; don't give it anything funky.
 *
 * @param rResponse raw coefficients() output from R
 * @return the parsed coefficients, in order
 * @throws NumberFormatException if fewer than two coefficients can be parsed
 */
public static double[] processRCoefficientResponse(String rResponse)
        throws NumberFormatException
{
    _log.debug("Parsing R response:\n***\n" + rResponse + "\n***\n");
    String justTheNumbers = rResponse;
    if (justTheNumbers.contains("\n"))
        justTheNumbers = justTheNumbers.substring(rResponse.indexOf('\n'));
    //R responses can get split over multiple lines. In this case, that means
    //alternating lines of header, values
    String[] textLines = rResponse.split("\n");
    if (textLines.length > 2)
    {
        _log.debug("Multiple (" + textLines.length + ") lines in R response");
        StringBuilder numbersBuf = new StringBuilder(textLines[1]);
        for (int i = 3; i < textLines.length; i += 2)
            numbersBuf.append(textLines[i]);
        justTheNumbers = numbersBuf.toString();
    }
    _log.debug("just the numbers:\n***\n" + justTheNumbers + "\n***\n");
    List<Float> coefficients = processCoefficientOnlyLine(justTheNumbers);
    if (coefficients.size() < 2)
        throw new NumberFormatException("Problem parsing coefficient response from R");
    double[] result = new double[coefficients.size()];
    for (int i = 0; i < result.length; i++)
        result[i] = coefficients.get(i);
    return result;
}
/**
 * Parses the numeric tokens out of a whitespace-separated coefficient line,
 * skipping the empty tokens produced by runs of consecutive whitespace.
 *
 * @param coefficientLine one line of coefficient values from R
 * @return the parsed values, in order
 */
protected static List<Float> processCoefficientOnlyLine(String coefficientLine)
{
    List<Float> result = new ArrayList<Float>();
    for (String token : coefficientLine.split("\\s"))
    {
        _log.debug("@@@" + token + "@@@");
        if (token.length() > 0)
            result.add(Float.parseFloat(token));
    }
    return result;
}
/**
 * Splits the output of printing an R list into a map from variable name to the
 * text printed for that variable. Variable names are (trimmed) lines starting
 * with '$'; the following lines, up to the next '$' line, form the value text.
 *
 * @param listOutput raw output from printing an R list
 * @return map from variable name (without the '$') to its printed value text;
 *         empty if no '$' lines are found
 */
public static Map<String, String> extractVariableStringsFromListOutput(String listOutput)
{
    _log.debug("Extracting variable strings from list output...");
    Map<String, String> result = new HashMap<String, String>();
    StringBuffer currentVarBuf = null;
    String currentVarName = null;
    for (String line : listOutput.split("\\n"))
    {
        line = line.trim();
        if (line.startsWith("$"))
        {
            // starting a new variable: store the previous one, if any
            if (currentVarName != null)
            {
                _log.debug("extractVarStrings: found var " + currentVarName);
                result.put(currentVarName, currentVarBuf.toString());
            }
            currentVarName = line.substring(1);
            currentVarBuf = new StringBuffer();
        }
        else if (currentVarName != null)
        {
            // lines before the first '$' line are ignored
            currentVarBuf.append("\n" + line);
        }
    }
    // store the last variable. Previously this put() ran unconditionally and
    // threw a NullPointerException when the output contained no '$' lines.
    if (currentVarName != null)
        result.put(currentVarName, currentVarBuf.toString());
    return result;
}
/**
 * Evaluate an R command, giving R the default amount of time
 * (DEFAULT_MAX_R_WAIT_MILLIS) to respond before giving up.
 * Convenience overload of evaluateRCommand(String, int).
 * @param rCommand command to execute
 * @return results from R
 */
public static String evaluateRCommand(String rCommand)
{
return evaluateRCommand(rCommand, DEFAULT_MAX_R_WAIT_MILLIS);
}
/**
 * Polls the reader thread until the specified sentinel appears in R's output,
 * the reader stops/errors, the R process dies, or the timeout elapses.
 * TODO: make this more efficient. Right now, at every read, it pulls back the whole response
 * @param readerThread thread accumulating R's stdout
 * @param p the running R process (checked so a dead process fails fast)
 * @param endResponseSentinel substring marking the end of the response; null means
 *                            "just wait for the timeout"
 * @param maxMillisToWaitForResponse approximate upper bound on polling time
 * @return everything read from R so far (may lack the sentinel if timed out)
 * @throws IOException if the process exited early or the reader thread failed
 */
protected static String collectInput(RReaderThread readerThread,
Process p,
String endResponseSentinel,
int maxMillisToWaitForResponse)
throws IOException
{
int totalMillisSlept = 0;
String responseString = "";
// _log.debug("collectInput, millis to wait: " + maxMillisToWaitForResponse);
//loop until sentinel shows up. Likely we'll get it right away, but
//loop anyway just in case R is slow, which it can be
responseString = readerThread.getReadString();
//every time we get more back from R, we convert the whole thing to a String.
//This is wasteful, but it's necessary to make ABSOLUTELY SURE we capture
//the sentinel if it occurs. Besides, likely we won't do this more than once
//or maybe twice on typical commands
while (readerThread.status == RReaderThread.STATUS_READING &&
(endResponseSentinel == null ||
!(responseString).contains(endResponseSentinel)))
{
// exitValue() throws IllegalThreadStateException while the process is still
// running; that is the cheapest liveness probe available here
boolean processIsAlive = false;
int exitValue = 0;
try
{
exitValue = p.exitValue();
}
catch (IllegalThreadStateException itse)
{
processIsAlive = true;
}
if (!processIsAlive)
{
// the process died before producing the sentinel: surface whatever it
// printed (truncated if huge) so the caller can diagnose the failure
StringBuffer exceptionMessageBuf = new StringBuffer("R Process exited before done reading, with status " + exitValue + ".\n");
if (responseString.length() < 5000)
{
exceptionMessageBuf.append("Output from process: " + responseString);
}
else
{
exceptionMessageBuf.append("Output from process is too long to display (" +
responseString.length() + " chars)");
}
throw new IOException(exceptionMessageBuf.toString());
}
//System.err.println(" loop, " + responseString);
if (totalMillisSlept > maxMillisToWaitForResponse)
{
break;
}
//sleep for one increment
try
{
Thread.sleep(R_SLEEP_INCREMENT_MILLIS);
totalMillisSlept += R_SLEEP_INCREMENT_MILLIS;
}
catch (InterruptedException e)
{
// NOTE(review): interrupt is swallowed and the interrupt flag is not
// restored — the poll simply continues until sentinel or timeout
}
responseString = readerThread.getReadString();
}
_log.debug("collectInput, readerThread status: " + readerThread.status);
if (readerThread.status == RReaderThread.STATUS_ERROR)
throw readerThread.exception;
return responseString;
}
/**
 * Handles a single write of a byte array to R's input stream.
 * Writing happens on its own thread so the coordinating code can keep draining
 * R's output/error streams while the write is in progress.
 */
public static class RWriterThread extends Thread
{
    OutputStream out = null;
    byte[] bytes = null;
    // volatile: written by this thread, polled by the coordinating thread.
    // Previously non-volatile, so the polling thread could miss the update.
    public volatile boolean done = false;

    /**
     * @param rProcess the running R process whose stdin will receive the bytes
     * @param bytes the bytes to send to R
     */
    public RWriterThread(Process rProcess, byte[] bytes)
    {
        out = new DataOutputStream(new BufferedOutputStream(rProcess.getOutputStream(), 8000));
        this.bytes = bytes;
    }

    public void run()
    {
        try
        {
            // write-and-flush one byte at a time -- presumably to keep R fed while
            // large inputs stream in; TODO confirm before batching these writes
            for (int i = 0; i < bytes.length; i++)
            {
                out.write(bytes[i]);
                out.flush();
            }
        } catch (Throwable t)
        {
            t.printStackTrace(System.out);
        }
        // set even on failure so a caller polling 'done' never hangs
        done = true;
    }
}
/**
* Manages the R error inputstream.
* On some systems, the error inputstream must be read periodically or writing to the
* outputstream will hang. In particular, I've seen output writing hang after 8kb
* are written; apparently something gets written to the error outputstream during that
* write, and unless it's written, nothing doing.
*/
public static class RErrorReaderThread extends Thread
{
protected InputStream inputStream = null;
protected StringBuffer accumulatedResponse = new StringBuffer();
protected StringBuffer newDataBuf = new StringBuffer();
protected boolean keepReading = true;
public IOException exception = null;
public int status = STATUS_READING;
public static final int STATUS_READING = 0;
public static final int STATUS_ERROR = 1;
protected boolean hasNewData = false;
public RErrorReaderThread(Process p)
{
inputStream = new BufferedInputStream(p.getErrorStream());
}
public void run()
{
try
{
_log.debug("Starting error thread");
while(keepReading)
{
int currentBytesAvailable = inputStream.available();
if (currentBytesAvailable > 0)
{
byte[] rResponse = new byte[currentBytesAvailable];
inputStream.read(rResponse);
_log.debug("R ERROR reader got output: " + new String(rResponse));
accumulatedResponse.append(new String(rResponse));
hasNewData = true;
newDataBuf.append(new String(rResponse));
}
//sleep for one increment
try
{
Thread.sleep(R_SLEEP_INCREMENT_MILLIS);
}
catch (InterruptedException e)
{
_log.debug("error thread interrupted");
}
}
if (inputStream != null)
inputStream.close();
_log.debug("Error reader successfully shutdown");
}
catch (IOException e)
{
_log.error("Failure while reading R response", e);
status = STATUS_ERROR;
exception = e;
}
}
public void shutdown()
{
keepReading = false;
}
public String getReadString()
{
return accumulatedResponse.toString();
}
public boolean hasNewData()
{
return hasNewData;
}
public String getNewData()
{
String result = newDataBuf.toString();
newDataBuf = new StringBuffer();
return result;
}
}
/**
* Latches onto the R process' input stream and doesn't let go
* TODO: provide a way to get access to just whatever was read since last time you checked
*/
public static class RReaderThread extends Thread
{
    InputStream inputStream = null;
    //everything read from R's stdout since this thread started
    StringBuffer accumulatedResponse = new StringBuffer();
    //volatile FIX: set false by shutdown() from another thread; without
    //volatile the read loop might never observe the change and spin forever
    protected volatile boolean keepReading = true;
    public int totalMillisSlept = 0;
    //populated if reading fails; examined via status by collectInput()
    public IOException exception = null;
    public int status = STATUS_READING;
    public static final int STATUS_READING = 0;
    public static final int STATUS_ERROR = 1;

    public RReaderThread(Process p)
    {
        inputStream = p.getInputStream();
    }

    /** Poll R's stdout until shutdown() is called, accumulating whatever shows up. */
    public void run()
    {
        try
        {
            _log.debug("Starting R output reader thread");
            while (keepReading)
            {
                int currentBytesAvailable = inputStream.available();
                if (currentBytesAvailable > 0)
                {
                    byte[] rResponse = new byte[currentBytesAvailable];
                    //FIX: read() may return fewer bytes than available(); only
                    //convert what was actually read (old code assumed a full buffer)
                    int bytesRead = inputStream.read(rResponse);
                    if (bytesRead > 0)
                    {
                        String responseString = new String(rResponse, 0, bytesRead);
                        _log.debug("R output reader got output: " + responseString);
                        accumulatedResponse.append(responseString);
                    }
                }
                //sleep for one increment
                try
                {
                    Thread.sleep(R_SLEEP_INCREMENT_MILLIS);
                    totalMillisSlept += R_SLEEP_INCREMENT_MILLIS;
                }
                catch (InterruptedException e)
                {
                }
            }
            if (inputStream != null)
                inputStream.close();
            _log.debug("Reader successfully shutdown");
        }
        catch (IOException e)
        {
            _log.error("Failure while reading R response", e);
            status = STATUS_ERROR;
            exception = e;
        }
    }

    /** Ask the read loop to stop after its current iteration. */
    public void shutdown()
    {
        keepReading = false;
    }

    /** @return everything read from stdout so far */
    public String getReadString()
    {
        return accumulatedResponse.toString();
    }
}
/**
* Cover method to start up a writer thread, send it some bytes, and make sure they got written
* @param p
* @param bytesToWrite
* @throws IOException
*/
public static void writeToR(Process p, byte[] bytesToWrite)
        throws IOException
{
    RWriterThread writer = new RWriterThread(p, bytesToWrite);
    writer.start();
    //busy-wait in short naps until the writer signals completion
    for (;;)
    {
        if (writer.done)
            break;
        try
        {
            Thread.sleep(15);
        }
        catch (InterruptedException ignored)
        {
            //keep waiting; completion is signaled only via the done flag
        }
    }
}
/**
* Read in a fully-qualified resource on the classpath, i.e., an R script
* @param resourcePath
* @return
*/
public static String readResourceFile(String resourcePath)
        throws IOException
{
    _log.debug("readResourceFile, resourcePath: " + resourcePath);
    InputStream in = RInterface.class.getResourceAsStream(resourcePath);
    if (in == null)
        throw new IOException("ERROR!! null resource!");
    StringBuffer commandBuf = new StringBuffer();
    try
    {
        //read byte-by-byte, widening each byte to a char (effectively
        //ISO-8859-1 decoding; fine for ASCII R scripts)
        int readchar;
        while ((readchar = in.read()) != -1)
            commandBuf.append((char) readchar);
    }
    finally
    {
        //FIX: the stream was previously never closed -- resource leak
        in.close();
    }
    return commandBuf.toString();
}
/**
* Evaluate an R command, or series of commands. Time out if we wait longer than maxMillis...
 * Return every single thing that R gives back, with whitespace trimmed from start and end.
* In order to make sure we wait the appropriate amount of time, and to make sure that we return
* only R's response and nothing else, I do the following:
*
* 1. Place a sentinel that will be echoed before the command response
* 2. Place a sentinel (that can be evaluated by R) after a newline, so that R will echo it after
* the command completes. That way we know to ignore everything after the second sentinel, and we know when
* we're done
* 3. When we get back the response, take everything between the two sentinel responses. Then, if that
* response contains any "Package loaded" lines, take them out, too
*
* If R fails for any reason, throw a RuntimeException
* @param rCommand
* @param maxMillisToWaitForResponse
* @return the result from R
*/
public static String evaluateRCommand(String rCommand, int maxMillisToWaitForResponse)
{
    _log.debug("Running R command:");
    // _log.debug(rCommand);
    //strip trailing whitespace so the end sentinel lands right after the command
    while (Character.isWhitespace(rCommand.charAt(rCommand.length() - 1)))
        rCommand = rCommand.substring(0, rCommand.length() - 1);
    long startTime = new Date().getTime();
    String result = null;
    String responseString = "";
    boolean timedOut = false;
    Process p = null;
    RReaderThread responseReaderThread = null;
    RErrorReaderThread errorReaderThread = null;
    try
    {
        _log.debug("Starting R...");
        String cmd = "R --vanilla --slave";
        //Kick off R, set up the input and output streams, write the full command and sentinels
        p = Runtime.getRuntime().exec(cmd, null, TempFileManager.getTmpDir());
        _log.debug("R process started.");
        //this is necessary for Windows. R initially produces some output
        //when you kick it off, and if you don't collect it Windows hangs
        //eternally.
        responseReaderThread = new RReaderThread(p);
        responseReaderThread.start();
        errorReaderThread = new RErrorReaderThread(p);
        errorReaderThread.start();
        byte[] bytesToR = rCommand.getBytes();
        _log.debug("Sending command to R. " + bytesToR.length + " bytes....");
        //sentinel1 is echoed before the command output, sentinel2 after it;
        //everything between the two echoes is the response we want
        String sentinel1 = "\"SENTINEL_SUPERCaliFRAGILIsticEXPIAlidOCIOUS1_SENTINEL\"";
        String sentinel2 = "\"SENTINEL_SUPERCaliFRAGILIsticEXPIAlidOCIOUS2_SENTINEL\"";
        writeToR(p, ("\n" + sentinel1 + '\n').getBytes());
        writeToR(p, bytesToR);
        writeToR(p, ("\n" + sentinel2 + '\n').getBytes());
        _log.debug("Sent command to R.");
        _log.debug("Waiting for output...");
        //read from the input stream until we come to the second sentinel,
        //which will be echoed after we get our response.
        //Reduce the max time to wait by however long we've already waited.
        responseString = collectInput(responseReaderThread, p, sentinel2,
                (int) ((maxMillisToWaitForResponse) -
                        (new Date().getTime() - startTime)));
        if (responseString == null ||
                !responseString.contains(sentinel2))
        {
            if (new Date().getTime() - startTime > maxMillisToWaitForResponse)
            {
                timedOut = true;
                throw new RuntimeException("timed out");
            }
            else
            {
                throw new RuntimeException("unknown error, didn't get sentinel");
            }
        }
        _log.debug("Got sentinel. Response length: " + responseString.length());
        //at this point we have both sentinels. Take everything after sentinel1's
        //echo (skipping whitespace) and before the line containing sentinel2
        int startIndex = responseString.indexOf(sentinel1) + sentinel1.length();
        while (Character.isWhitespace(responseString.charAt(startIndex)))
            startIndex++;
        int firstBadIndex = responseString.indexOf(sentinel2);
        while (responseString.charAt(firstBadIndex) != '\n')
            firstBadIndex--;
        result = responseString.substring(startIndex, firstBadIndex);
        //We may get "package loaded" or "null device" lines. If so, ignore them
        while (result.startsWith("Package") || result.startsWith("null device"))
        {
            String firstLine = result.substring(0, result.indexOf("\n"));
            if (result.startsWith("null device") || firstLine.contains("loaded."))
            {
                result = result.substring(firstLine.length() + 1);
            }
            else
                break;
        }
        _log.debug("Important part of response (length " + result.length() + "), with whitespace: " + result);
        //strip whitespace from beginning and end
        while (Character.isWhitespace(result.charAt(0)))
            result = result.substring(1);
        while (Character.isWhitespace(result.charAt(result.length() - 1)))
            result = result.substring(0, result.length() - 1);
        _log.debug("Stripped whitespace from response");
    }
    catch (Exception e)
    {
        String failureReason = "";
        if (timedOut)
        {
            failureReason = "Timed out while calling R. Max millis: " + maxMillisToWaitForResponse +
                    ", waited " + (new Date().getTime() - startTime);
        }
        else
        {
            failureReason = "Error calling R, temp dir is " + TempFileManager.getTmpDir() + ". ";
            try
            {
                if (responseReaderThread != null)
                {
                    //BUGFIX: was "failureReason += failureReason + ...", which
                    //duplicated the message prefix in the exception text
                    failureReason += "R output:\n" +
                            collectInput(responseReaderThread, p, null, 100);
                }
                else
                    failureReason += "No error output from R to display. Error Message: " + e.getMessage() + ", Exception class: " + e.getClass().getName();
            }
            catch (Exception ex)
            {
                failureReason += "Failed while interrogating R error output";
            }
            finally
            {
                //BUGFIX: guard against errorReaderThread being null when exec
                //failed before the reader threads were created
                if (errorReaderThread != null && errorReaderThread.hasNewData())
                    failureReason = failureReason + "\n Error Output: " + errorReaderThread.getNewData();
                e.printStackTrace(System.err);
            }
        }
        throw new RuntimeException(failureReason, e);
    }
    finally
    {
        //close all the input and output streams, kill the process
        if (responseReaderThread != null)
            responseReaderThread.shutdown();
        if (errorReaderThread != null)
            errorReaderThread.shutdown();
        //Give the threads a chance to shut down before destroying the process
        try
        {
            Thread.sleep(10);
        }
        catch (InterruptedException e)
        {
        }
        try
        {
            if (p != null)
                p.destroy();
        }
        catch (Exception e)
        {
            _log.debug("Failed to close R process. How sad.");
        }
    }
    return result;
}
/**
* cover method with default wait time, vector variables only
* @param rExpression
* @param variableValues
* @param dependentPackageNames
* @return
*/
public static String evaluateRExpression(String rExpression,
                                         Map<String, double[]> variableValues,
                                         String[] dependentPackageNames)
{
    //delegate to the timeout-taking overload, using the default wait time
    return evaluateRExpression(rExpression, variableValues,
            dependentPackageNames, DEFAULT_MAX_R_WAIT_MILLIS);
}
/**
* Generic method for running an R expression and feeding the output to a file.
* Populates variable values and loads packages if necessary
* @param rExpression
* @return
*/
public static String evaluateRExpression(String rExpression,
                                         Map<String, double[]> variableValues,
                                         String[] dependentPackageNames,
                                         int maxMillisToWaitForResponse)
{
    //no matrix variables: pass null through to the matrix-aware overload
    return evaluateRExpression(rExpression, variableValues, null,
            dependentPackageNames, maxMillisToWaitForResponse);
}
/**
* Cover method with default wait time
* @param rExpression
* @param vectorVariableValues
* @param matrixVariableValues
* @param dependentPackageNames
* @return
*/
public static String evaluateRExpression(String rExpression,
                                         Map<String, double[]> vectorVariableValues,
                                         Map<String, double[][]> matrixVariableValues,
                                         String[] dependentPackageNames)
{
    //delegate to the timeout-taking overload, using the default wait time
    return evaluateRExpression(rExpression, vectorVariableValues, matrixVariableValues,
            dependentPackageNames, DEFAULT_MAX_R_WAIT_MILLIS);
}
/**
 * Cover method with no scalar variables: delegates to the full overload,
 * passing null for the scalar-variable map.
 */
public static String evaluateRExpression(String rExpression,
                                         Map<String, double[]> vectorVariableValues,
                                         Map<String, double[][]> matrixVariableValues,
                                         String[] dependentPackageNames,
                                         int maxMillisToWaitForResponse)
{
    return evaluateRExpression(rExpression, null, vectorVariableValues, matrixVariableValues,
            dependentPackageNames, maxMillisToWaitForResponse);
}
/**
* R doesn't like backslashes, even on Windows. So replace them with forward slashes
* @param file
* @return
*/
public static String generateRFriendlyPath(File file)
{
    //plain character replacement; the old replaceAll("\\\\", "/") did the
    //same thing through an unnecessary regular expression
    return file.getAbsolutePath().replace('\\', '/');
}
/**
* Generic method for running an R expression and feeding the output to a file.
* Populates variable values and loads packages if necessary
* @param rExpression
* @return
*/
public static String evaluateRExpression(String rExpression,
                                         Map<String, Object> scalarVariableValues,
                                         Map<String, double[]> vectorVariableValues,
                                         Map<String, double[][]> matrixVariableValues,
                                         String[] dependentPackageNames,
                                         int maxMillisToWaitForResponse)
{
    //Build one R script: library() calls first, then scalar/vector/matrix
    //variable assignments, then the expression itself.
    //StringBuilder: single-threaded assembly, no need for StringBuffer locking.
    StringBuilder rCommand = new StringBuilder();
    if (dependentPackageNames != null && dependentPackageNames.length > 0)
    {
        for (String dependentPackageName : dependentPackageNames)
        {
            rCommand.append("library('" + dependentPackageName + "')\n");
        }
    }
    if (scalarVariableValues != null && scalarVariableValues.size() > 0)
    {
        //entrySet iteration avoids a lookup per key
        for (Map.Entry<String, Object> entry : scalarVariableValues.entrySet())
        {
            rCommand.append(entry.getKey() + "<-" + entry.getValue() + "\n");
        }
    }
    if (vectorVariableValues != null && vectorVariableValues.size() > 0)
    {
        for (Map.Entry<String, double[]> entry : vectorVariableValues.entrySet())
        {
            rCommand.append(entry.getKey() + "<-c(");
            double[] thisVarValues = entry.getValue();
            for (int i = 0; i < thisVarValues.length; i++)
            {
                rCommand.append(thisVarValues[i]);
                //comma-separate all but the last element; newline keeps lines short
                if (i < thisVarValues.length - 1)
                    rCommand.append(",\n");
            }
            rCommand.append(")\n");
        }
    }
    if (matrixVariableValues != null && matrixVariableValues.size() > 0)
    {
        //NOTE(review): matrices are assumed rectangular and non-empty --
        //ncol is taken from the first row
        for (Map.Entry<String, double[][]> entry : matrixVariableValues.entrySet())
        {
            rCommand.append(entry.getKey() + "<-matrix(c(");
            double[][] thisVarValues = entry.getValue();
            for (int i = 0; i < thisVarValues.length; i++)
            {
                for (int j = 0; j < thisVarValues[0].length; j++)
                {
                    rCommand.append(thisVarValues[i][j]);
                    //append the comma unless we're on the very last cell
                    if (i < thisVarValues.length - 1 ||
                        j < thisVarValues[0].length - 1)
                        rCommand.append(",");
                    //this is just formatting, for debug purposes
                    if (j == thisVarValues[0].length - 1)
                        rCommand.append("\n");
                }
            }
            rCommand.append("),nrow=" + thisVarValues.length +
                    ",ncol=" + thisVarValues[0].length + ", byrow=TRUE)\n");
        }
    }
    rCommand.append(rExpression);
    return evaluateRCommand(rCommand.toString(), maxMillisToWaitForResponse);
}
/**
* Creates a file containing the values to be loaded into the R variable with name variableName,
* and returns the R syntax necessary to load the values into the variable.
* If not successful, throws an exception
* @param variableName
* @param variableValues
* @return
*/
/* This was useful when everything was file-based. Not so much any more
protected static String populateArrayValueFromFile(String variableName, double[] variableValues,
Object caller)
throws FileNotFoundException
{
File tempVarValueFile =
TempFileManager.createTempFile("RInterface.populateArrayValueFromFile.var." + variableName,
caller);
PrintWriter pw = null;
try
{
pw = new PrintWriter(tempVarValueFile);
for (double variableValue : variableValues)
pw.println(variableValue);
}
finally
{
if (pw != null)
pw.close();
}
return variableName + "<-read.table('" + tempVarValueFile.getAbsolutePath() +"',header=FALSE)[,1]";
}
*/
}
| src/org/fhcrc/cpl/toolbox/RInterface.java | /*
* Copyright (c) 2003-2007 Fred Hutchinson Cancer Research Center
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fhcrc.cpl.toolbox;
import org.apache.log4j.Logger;
import org.fhcrc.cpl.toolbox.ApplicationContext;
import java.io.*;
import java.util.*;
/**
* A class with generic methods for interfacing with R. Hides all kinds of exceptions, both
* from R and from file IO
*/
public class RInterface
{
private static Logger _log = Logger.getLogger(RInterface.class);
//NOTE(review): appears unused in the visible portion of this class -- confirm before removing
protected static int expressionNumber = 0;
//maximum number of milliseconds to wait for a response from R before giving up
protected static final int DEFAULT_MAX_R_WAIT_MILLIS = 5000;
//number of milliseconds to sleep between checks for response from R
protected static final int R_SLEEP_INCREMENT_MILLIS = 50;
/**
* Choose the right name of the command for running R for this OS
* @return
*/
protected static String getRCommandStringForOS()
{
    String os = System.getProperty("os.name");
    //Windows exposes the launcher as a single RCMD executable;
    //everywhere else it is the two-word "R CMD" form
    return os.contains("Windows") ? "RCMD" : "R CMD";
}
/**
* Run an R script, setting the R directory to be the temp dir.
* Also mark the Rout file generated by R for deletion when tempfiles for the caller
* are cleaned up (don't want to do it here because a user might want to view the Rout file).
*
* This is the easy and stable way to invoke R. The alternative is to interact with
* the input, output, and error streams of the process, which provides some advantages.
* @param rScriptFile
* @return true if successful, false otherwise
*/
public static boolean runRScript(File rScriptFile, Object caller)
        throws RuntimeException
{
    //assume failure
    boolean success = false;
    String rScriptFilepath = rScriptFile.getAbsolutePath();
    try
    {
        //e.g. "RCMD BATCH --slave <script>" on Windows, "R CMD BATCH ..." elsewhere
        String cmd = getRCommandStringForOS();
        cmd = cmd + " BATCH --slave " + rScriptFilepath;
        _log.debug("Before runing R, script file " + rScriptFilepath);
        //run R with the temp dir as working directory so R's output files land there
        Process p = Runtime.getRuntime().exec(cmd ,null, TempFileManager.getTmpDir());
        _log.debug("after running R");
        //block until the R process finishes; exit status 0 means success
        int err = p.waitFor();
        _log.debug("process returned, "+err);
        if (err == 0)
            success = true;
        else
        {
            //keep the script around for post-mortem debugging when R fails
            TempFileManager.unmarkFileForDeletion(rScriptFile, caller);
            throw new RuntimeException("Error in executing R, temp dir is " + TempFileManager.getTmpDir() + ". R process status: " + err, null);
        }
        //Only try to delete the out file if we successfully ran the script
        TempFileManager.markFileForDeletion(new File(rScriptFile.getAbsolutePath() + "out"), caller);
    }
    catch (Exception e)
    {
        throw new RuntimeException("Failed to run R code. Details follow.\n" +
                "Please make sure that R is a) installed and b) on your PATH. To do this,\n" +
                "open a command prompt window and type R (on Linux) or RCMD (on Windows)\n" +
                "If the R command is not found, your PATH environment variable needs to be\n" +
                "modified to contain the R binary directory.",e);
    }
    return success;
}
/**
* First index in response indicates row. Second indicates column.
* @param rResponse
* @return
*/
public static float[][] processRMatrixResponse(String rResponse)
{
    String[] textLines = rResponse.split("\n");
    //the header line of column names is not a data row
    int numRows = textLines.length-1;
    String firstLine = stripLeadingWhitespace(textLines[0]);
    String[] columnNames = firstLine.split("\\s+");
    int numCols = columnNames.length;
    _log.debug("processRMatrixResponse, first line: " + firstLine + ", numCols=" + numCols);
    float[][] result = new float[numRows][numCols];
    for (int i=0; i<numRows; i++)
    {
        //drop everything through the "[n,]" row label, then leading whitespace
        String line = textLines[i+1];
        line = stripLeadingWhitespace(line.substring(line.indexOf("]")+1));
        String[] entries = line.split("\\s+");
        for (int j=0; j<numCols; j++)
        {
            try
            {
                result[i][j] = Float.parseFloat(entries[j]);
            }
            catch (RuntimeException e)
            {
                ApplicationContext.infoMessage("FAILED while processing line " + i + ": **" + line + "**, specifically the value **" + entries[j] + "**, entry " + j);
                throw e;
            }
        }
    }
    return result;
}

/**
 * Remove leading whitespace characters from a line.
 * Throws IndexOutOfBoundsException if the line is entirely whitespace,
 * matching the original inline stripping behavior.
 */
private static String stripLeadingWhitespace(String line)
{
    int firstNonWhitespace = 0;
    while (Character.isWhitespace(line.charAt(firstNonWhitespace)))
        firstNonWhitespace++;
    return line.substring(firstNonWhitespace);
}
/**
 * cute little method that handles an R coefficients() response by ignoring the first line
* and splitting the second around whitespace. Brittle, don't give it anything at all funky
* @param rResponse
* @return
*/
public static double[] processRCoefficientResponse(String rResponse)
        throws NumberFormatException
{
    _log.debug("Parsing R response:\n***\n"+rResponse+"\n***\n");
    String justTheNumbers = rResponse;
    if (justTheNumbers.contains("\n"))
        justTheNumbers = justTheNumbers.substring(rResponse.indexOf('\n'));
    //R responses can get split over multiple lines: alternating header lines
    //(indices 0, 2, 4, ...) and value lines (indices 1, 3, 5, ...)
    String[] textLines = rResponse.split("\n");
    if (textLines.length > 2)
    {
        _log.debug("Multiple (" + textLines.length + ") lines in R response");
        StringBuilder valueLines = new StringBuilder(textLines[1]);
        for (int lineIndex = 3; lineIndex < textLines.length; lineIndex += 2)
            valueLines.append(textLines[lineIndex]);
        justTheNumbers = valueLines.toString();
    }
    _log.debug("just the numbers:\n***\n"+justTheNumbers+"\n***\n");
    List<Float> coefficients = new ArrayList<Float>(processCoefficientOnlyLine(justTheNumbers));
    if (coefficients.size() < 2)
        throw new NumberFormatException("Problem parsing coefficient response from R");
    double[] result = new double[coefficients.size()];
    for (int i = 0; i < result.length; i++)
        result[i] = coefficients.get(i);
    return result;
}
/**
* Parse the important bits of a response from R's 'coeff' command
* @param coefficientLine
* @return
*/
protected static List<Float> processCoefficientOnlyLine(String coefficientLine)
{
    List<Float> result = new ArrayList<Float>();
    //runs of whitespace produce empty tokens with "\\s"; skip them
    for (String token : coefficientLine.split("\\s"))
    {
        _log.debug("@@@"+token+"@@@");
        if (token.length() > 0)
        {
            result.add(Float.parseFloat(token));
        }
    }
    return result;
}
/**
 * Split R list output (lines of "$varname" headers, each followed by that
 * variable's printed value) into a map from variable name to value text.
 * Text appearing before the first "$" header is ignored; the old code threw
 * a NullPointerException in that case, and put a null key for empty input.
 * @param listOutput the raw list output from R
 * @return map from variable name to the (newline-prefixed) text of its value
 */
public static Map<String, String> extractVariableStringsFromListOutput(String listOutput)
{
    _log.debug("Extracting variable strings from list output...");
    String[] lines = listOutput.split("\\n");
    Map<String, String> result = new HashMap<String, String>();
    StringBuffer currentVarBuf = null;
    String currentVarName = null;
    for (String line : lines)
    {
        line=line.trim();
        if (line.startsWith("$"))
        {
            //starting a new variable: store the previous one, if any
            if (currentVarName != null)
            {
                _log.debug("extractVarStrings: found var " + currentVarName);
                result.put(currentVarName, currentVarBuf.toString());
            }
            currentVarName = line.substring(1);
            currentVarBuf = new StringBuffer();
        }
        else if (currentVarBuf != null)
        {
            //value line belonging to the current variable; lines before the
            //first "$" header are skipped (previously an NPE)
            currentVarBuf.append("\n" + line);
        }
    }
    //store the final variable, if the output contained any headers at all
    if (currentVarName != null)
        result.put(currentVarName, currentVarBuf.toString());
    return result;
}
/**
* Evaluate an R command, giving it the default amount of time to respond
* @param rCommand command to execute
* @return results from R
*/
public static String evaluateRCommand(String rCommand)
{
    //delegate to the timeout-taking overload with the default wait time
    return evaluateRCommand(rCommand, DEFAULT_MAX_R_WAIT_MILLIS);
}
/**
* Talks to the ReaderThread to check periodically for the specified sentinel.
* TODO: make this more efficient. Right now, at every read, it pulls back the whole response
* @param readerThread
* @param endResponseSentinel
* @param maxMillisToWaitForResponse
* @return
* @throws IOException
*/
protected static String collectInput(RReaderThread readerThread,
                                     Process p,
                                     String endResponseSentinel,
                                     int maxMillisToWaitForResponse)
        throws IOException
{
    int totalMillisSlept = 0;
    String responseString = "";
    // _log.debug("collectInput, millis to wait: " + maxMillisToWaitForResponse);
    //loop until sentinel shows up. Likely we'll get it right away, but
    //loop anyway just in case R is slow, which it can be
    responseString = readerThread.getReadString();
    //every time we get more back from R, we convert the whole thing to a String.
    //This is wasteful, but it's necessary to make ABSOLUTELY SURE we capture
    //the sentinel if it occurs. Besides, likely we won't do this more than once
    //or maybe twice on typical commands
    //a null endResponseSentinel means "just collect whatever is there"
    while (readerThread.status == RReaderThread.STATUS_READING &&
            (endResponseSentinel == null ||
                    !(responseString).contains(endResponseSentinel)))
    {
        //Process.exitValue() throws IllegalThreadStateException while the
        //process is still running -- used here as a liveness probe
        boolean processIsAlive = false;
        int exitValue = 0;
        try
        {
            exitValue = p.exitValue();
        }
        catch (IllegalThreadStateException itse)
        {
            processIsAlive = true;
        }
        if (!processIsAlive)
        {
            //R died before producing the sentinel: report its exit status and
            //(if reasonably small) whatever output it managed to produce
            StringBuffer exceptionMessageBuf = new StringBuffer("R Process exited before done reading, with status " + exitValue + ".\n");
            if (responseString.length() < 5000)
            {
                exceptionMessageBuf.append("Output from process: " + responseString);
            }
            else
            {
                exceptionMessageBuf.append("Output from process is too long to display (" +
                        responseString.length() + " chars)");
            }
            throw new IOException(exceptionMessageBuf.toString());
        }
        //System.err.println("    loop, " + responseString);
        //timeout exhausted: give up and return whatever we collected so far
        if (totalMillisSlept > maxMillisToWaitForResponse)
        {
            break;
        }
        //sleep for one increment
        try
        {
            Thread.sleep(R_SLEEP_INCREMENT_MILLIS);
            totalMillisSlept += R_SLEEP_INCREMENT_MILLIS;
        }
        catch (InterruptedException e)
        {
        }
        responseString = readerThread.getReadString();
    }
    _log.debug("collectInput, readerThread status: " + readerThread.status);
    //surface any IOException the reader thread hit while we were polling
    if (readerThread.status == RReaderThread.STATUS_ERROR)
        throw readerThread.exception;
    return responseString;
}
/**
* Handles a single write of a byte array to R
*/
public static class RWriterThread extends Thread
{
    //buffered view of the R process' stdin; intentionally NOT closed here,
    //because it belongs to the process and is reused by subsequent writes
    OutputStream out = null;
    //payload to send, fixed at construction
    byte[] bytes = null;
    //volatile FIX: this flag is polled by writeToR() from another thread;
    //without volatile that thread may never observe completion and spin forever
    public volatile boolean done = false;

    public RWriterThread(Process rProcess, byte[] bytes)
    {
        out = new DataOutputStream(new BufferedOutputStream(rProcess.getOutputStream(), 8000));
        this.bytes = bytes;
    }

    /**
     * Write the payload one byte at a time, flushing after each byte
     * (presumably to keep R's stdin pipe from backing up -- TODO confirm).
     * Errors are printed and swallowed; done is set regardless, so the
     * poller in writeToR() always terminates.
     */
    public void run()
    {
        try
        {
            for (int i = 0; i < bytes.length; i++)
            {
                out.write(bytes[i]);
                out.flush();
            }
        } catch (Throwable t)
        {
            t.printStackTrace(System.out);
        }
        done = true;
    }
}
/**
* Manages the R error inputstream.
* On some systems, the error inputstream must be read periodically or writing to the
* outputstream will hang. In particular, I've seen output writing hang after 8kb
* are written; apparently something gets written to the error outputstream during that
* write, and unless it's written, nothing doing.
*/
public static class RErrorReaderThread extends Thread
{
protected InputStream inputStream = null;
protected StringBuffer accumulatedResponse = new StringBuffer();
protected StringBuffer newDataBuf = new StringBuffer();
protected boolean keepReading = true;
public IOException exception = null;
public int status = STATUS_READING;
public static final int STATUS_READING = 0;
public static final int STATUS_ERROR = 1;
protected boolean hasNewData = false;
public RErrorReaderThread(Process p)
{
inputStream = new BufferedInputStream(p.getErrorStream());
}
public void run()
{
try
{
_log.debug("Starting error thread");
while(keepReading)
{
int currentBytesAvailable = inputStream.available();
if (currentBytesAvailable > 0)
{
byte[] rResponse = new byte[currentBytesAvailable];
inputStream.read(rResponse);
_log.debug("R ERROR reader got output: " + new String(rResponse));
accumulatedResponse.append(new String(rResponse));
hasNewData = true;
newDataBuf.append(new String(rResponse));
}
//sleep for one increment
try
{
Thread.sleep(R_SLEEP_INCREMENT_MILLIS);
}
catch (InterruptedException e)
{
_log.debug("error thread interrupted");
}
}
if (inputStream != null)
inputStream.close();
_log.debug("Error reader successfully shutdown");
}
catch (IOException e)
{
_log.error("Failure while reading R response", e);
status = STATUS_ERROR;
exception = e;
}
}
public void shutdown()
{
keepReading = false;
}
public String getReadString()
{
return accumulatedResponse.toString();
}
public boolean hasNewData()
{
return hasNewData;
}
public String getNewData()
{
String result = newDataBuf.toString();
newDataBuf = new StringBuffer();
return result;
}
}
/**
* Latches onto the R process' input stream and doesn't let go
* TODO: provide a way to get access to just whatever was read since last time you checked
*/
public static class RReaderThread extends Thread
{
    InputStream inputStream = null;
    //everything read from R's stdout since this thread started
    StringBuffer accumulatedResponse = new StringBuffer();
    //volatile FIX: set false by shutdown() from another thread; without
    //volatile the read loop might never observe the change and spin forever
    protected volatile boolean keepReading = true;
    public int totalMillisSlept = 0;
    //populated if reading fails; examined via status by collectInput()
    public IOException exception = null;
    public int status = STATUS_READING;
    public static final int STATUS_READING = 0;
    public static final int STATUS_ERROR = 1;

    public RReaderThread(Process p)
    {
        inputStream = p.getInputStream();
    }

    /** Poll R's stdout until shutdown() is called, accumulating whatever shows up. */
    public void run()
    {
        try
        {
            _log.debug("Starting R output reader thread");
            while (keepReading)
            {
                int currentBytesAvailable = inputStream.available();
                if (currentBytesAvailable > 0)
                {
                    byte[] rResponse = new byte[currentBytesAvailable];
                    //FIX: read() may return fewer bytes than available(); only
                    //convert what was actually read (old code assumed a full buffer)
                    int bytesRead = inputStream.read(rResponse);
                    if (bytesRead > 0)
                    {
                        String responseString = new String(rResponse, 0, bytesRead);
                        _log.debug("R output reader got output: " + responseString);
                        accumulatedResponse.append(responseString);
                    }
                }
                //sleep for one increment
                try
                {
                    Thread.sleep(R_SLEEP_INCREMENT_MILLIS);
                    totalMillisSlept += R_SLEEP_INCREMENT_MILLIS;
                }
                catch (InterruptedException e)
                {
                }
            }
            if (inputStream != null)
                inputStream.close();
            _log.debug("Reader successfully shutdown");
        }
        catch (IOException e)
        {
            _log.error("Failure while reading R response", e);
            status = STATUS_ERROR;
            exception = e;
        }
    }

    /** Ask the read loop to stop after its current iteration. */
    public void shutdown()
    {
        keepReading = false;
    }

    /** @return everything read from stdout so far */
    public String getReadString()
    {
        return accumulatedResponse.toString();
    }
}
/**
* Cover method to start up a writer thread, send it some bytes, and make sure they got written
* @param p
* @param bytesToWrite
* @throws IOException
*/
public static void writeToR(Process p, byte[] bytesToWrite)
        throws IOException
{
    RWriterThread writer = new RWriterThread(p, bytesToWrite);
    writer.start();
    //busy-wait in short naps until the writer signals completion
    for (;;)
    {
        if (writer.done)
            break;
        try
        {
            Thread.sleep(15);
        }
        catch (InterruptedException ignored)
        {
            //keep waiting; completion is signaled only via the done flag
        }
    }
}
/**
* Read in a fully-qualified resource on the classpath, i.e., an R script
* @param resourcePath
* @return
*/
public static String readResourceFile(String resourcePath)
        throws IOException
{
    _log.debug("readResourceFile, resourcePath: " + resourcePath);
    InputStream in = RInterface.class.getResourceAsStream(resourcePath);
    if (in == null)
        throw new IOException("ERROR!! null resource!");
    StringBuffer commandBuf = new StringBuffer();
    try
    {
        //read byte-by-byte, widening each byte to a char (effectively
        //ISO-8859-1 decoding; fine for ASCII R scripts)
        int readchar;
        while ((readchar = in.read()) != -1)
            commandBuf.append((char) readchar);
    }
    finally
    {
        //FIX: the stream was previously never closed -- resource leak
        in.close();
    }
    return commandBuf.toString();
}
/**
* Evaluate an R command, or series of commands. Time out if we wait longer than maxMillis...
 * Return every single thing that R gives back, with whitespace trimmed from start and end.
* In order to make sure we wait the appropriate amount of time, and to make sure that we return
* only R's response and nothing else, I do the following:
*
* 1. Place a sentinel that will be echoed before the command response
* 2. Place a sentinel (that can be evaluated by R) after a newline, so that R will echo it after
* the command completes. That way we know to ignore everything after the second sentinel, and we know when
* we're done
* 3. When we get back the response, take everything between the two sentinel responses. Then, if that
* response contains any "Package loaded" lines, take them out, too
*
* If R fails for any reason, throw a RuntimeException
* @param rCommand
* @param maxMillisToWaitForResponse
* @return the result from R
*/
public static String evaluateRCommand(String rCommand, int maxMillisToWaitForResponse)
{
_log.debug("Running R command:");
// _log.debug(rCommand);
while(Character.isWhitespace(rCommand.charAt(rCommand.length()-1)))
rCommand = rCommand.substring(0, rCommand.length()-1);
long startTime = new Date().getTime();
String result = null;
String responseString = "";
boolean timedOut = false;
Process p = null;
RReaderThread responseReaderThread = null;
RErrorReaderThread errorReaderThread = null;
try
{
_log.debug("Starting R...");
String cmd = "R --vanilla --slave";
//Kick off R, set up the input and output streams, write the full command and sentinels
p = Runtime.getRuntime().exec(cmd, null, TempFileManager.getTmpDir());
_log.debug("R process started.");
//this is necessary for Windows. R initially produces some output
//when you kick it off, and if you don't collect it Windows hangs
//eternally.
responseReaderThread = new RReaderThread(p);
responseReaderThread.start();
errorReaderThread = new RErrorReaderThread(p);
errorReaderThread.start();
byte[] bytesToR = rCommand.getBytes();
_log.debug("Sending command to R. " + bytesToR.length + " bytes....");
//System.err.println("****************");
//System.err.println(new String(bytesToR));
//System.err.println("****************");
String sentinel1 = "\"SENTINEL_SUPERCaliFRAGILIsticEXPIAlidOCIOUS1_SENTINEL\"";
String sentinel2 = "\"SENTINEL_SUPERCaliFRAGILIsticEXPIAlidOCIOUS2_SENTINEL\"";
writeToR(p, ("\n" + sentinel1 + '\n').getBytes());
writeToR(p, bytesToR);
writeToR(p, ("\n" + sentinel2 + '\n').getBytes());
_log.debug("Sent command to R.");
//read from the input stream until we come to the end-command sentinel,
//which will be after the echo of our input but before the response.
//Reduce the max time to wait by however long we've already waited.
_log.debug("Waiting for output...");
//read from the input stream until we come to the second sentinel,
//which will be echoed after we get our response.
//Reduce the max time to wait by however long we've already waited.
responseString = collectInput(responseReaderThread, p, sentinel2,
(int) ((maxMillisToWaitForResponse) -
(new Date().getTime() - startTime)));
if (responseString == null ||
!responseString.contains(sentinel2))
{
// _log.debug(responseString);
if (new Date().getTime() - startTime > maxMillisToWaitForResponse)
{
timedOut = true;
throw new RuntimeException("timed out");
}
else
{
throw new RuntimeException("unknown error, didn't get sentinel");
}
}
//System.err.println("Raw R response: " + responseString);
_log.debug("Got sentinel. Response length: " + responseString.length());
//at this point we've both sentinels.
//Get rid of the last line, which is the sentinel response
int startIndex = responseString.indexOf(sentinel1) + sentinel1.length();
while (Character.isWhitespace(responseString.charAt(startIndex)))
startIndex++;
int firstBadIndex = responseString.indexOf(sentinel2);
while (responseString.charAt(firstBadIndex) != '\n')
firstBadIndex--;
result = responseString.substring(startIndex, firstBadIndex);
//We may get "package loaded" or "null device" lines. If so, ignore them
while (result.startsWith("Package") || result.startsWith("null device"))
{
String firstLine = result.substring(0, result.indexOf("\n"));
if (result.startsWith("null device") || firstLine.contains("loaded."))
{
result = result.substring(firstLine.length() + 1);
}
else
break;
}
_log.debug("Important part of response (length " + result.length() + "), with whitespace: " + result);
//_log.debug(result);
//strip whitespace from beginning and end
while (Character.isWhitespace(result.charAt(0)))
result = result.substring(1);
while (Character.isWhitespace(result.charAt(result.length()-1)))
result = result.substring(0,result.length()-1);
_log.debug("Stripped whitespace from response");
}
catch (Exception e)
{
String failureReason = "";
if (timedOut)
{
failureReason = "Timed out while calling R. Max millis: " + maxMillisToWaitForResponse +
", waited " + (new Date().getTime() - startTime);
}
else
{
failureReason = "Error calling R, temp dir is " + TempFileManager.getTmpDir() + ". ";
try
{
if (responseReaderThread != null)
{
failureReason += failureReason + "R output:\n" +
collectInput(responseReaderThread, p, null, 100);
}
else
failureReason += "No error output from R to display. Error Message: " + e.getMessage() + ", Exception class: " + e.getClass().getName();
}
catch (Exception ex)
{
failureReason += "Failed while interrogating R error output";
}
finally
{
if (errorReaderThread.hasNewData())
failureReason = failureReason + "\n Error Output: " + errorReaderThread.getNewData();
e.printStackTrace(System.err);
}
}
throw new RuntimeException(failureReason,e);
}
finally
{
//close all the input and output streams, kill the process
if (responseReaderThread != null)
responseReaderThread.shutdown();
if (errorReaderThread != null)
errorReaderThread.shutdown();
//Give the threads a chance to shut down before destroying the process
try
{
Thread.sleep(10);
}
catch (InterruptedException e)
{
}
try
{
if (p != null)
p.destroy();
}
catch (Exception e)
{
_log.debug("Failed to close R process. How sad.");
}
}
return result;
}
    /**
     * Cover method with the default wait time, vector variables only.
     * Delegates to the four-argument overload with DEFAULT_MAX_R_WAIT_MILLIS.
     * @param rExpression the R expression to evaluate
     * @param variableValues vector variables to define in R before evaluation (name to values)
     * @param dependentPackageNames R packages to load first, or null for none
     * @return the trimmed output from R
     */
    public static String evaluateRExpression(String rExpression,
                                             Map<String, double[]> variableValues,
                                             String[] dependentPackageNames)
    {
        return evaluateRExpression(rExpression, variableValues,
                dependentPackageNames, DEFAULT_MAX_R_WAIT_MILLIS);
    }
    /**
     * Cover method for vector variables only, with an explicit wait time.
     * Delegates to the matrix-aware overload with a null matrix map.
     * @param rExpression the R expression to evaluate
     * @param variableValues vector variables to define in R before evaluation (name to values)
     * @param dependentPackageNames R packages to load first, or null for none
     * @param maxMillisToWaitForResponse maximum time to wait for R, in milliseconds
     * @return the trimmed output from R
     */
    public static String evaluateRExpression(String rExpression,
                                             Map<String, double[]> variableValues,
                                             String[] dependentPackageNames,
                                             int maxMillisToWaitForResponse)
    {
        return evaluateRExpression(rExpression, variableValues, null,
                dependentPackageNames, maxMillisToWaitForResponse);
    }
    /**
     * Cover method with the default wait time, for vector and matrix variables.
     * Delegates to the overload taking an explicit timeout with DEFAULT_MAX_R_WAIT_MILLIS.
     * @param rExpression the R expression to evaluate
     * @param vectorVariableValues vector variables to define in R (name to values), or null
     * @param matrixVariableValues matrix variables to define in R (name to rows), or null
     * @param dependentPackageNames R packages to load first, or null for none
     * @return the trimmed output from R
     */
    public static String evaluateRExpression(String rExpression,
                                             Map<String, double[]> vectorVariableValues,
                                             Map<String, double[][]> matrixVariableValues,
                                             String[] dependentPackageNames)
    {
        return evaluateRExpression(rExpression, vectorVariableValues, matrixVariableValues,
                dependentPackageNames, DEFAULT_MAX_R_WAIT_MILLIS);
    }
    /**
     * Cover method for vector and matrix variables with an explicit wait time.
     * Delegates to the full overload with a null scalar-variable map.
     * @param rExpression the R expression to evaluate
     * @param vectorVariableValues vector variables to define in R (name to values), or null
     * @param matrixVariableValues matrix variables to define in R (name to rows), or null
     * @param dependentPackageNames R packages to load first, or null for none
     * @param maxMillisToWaitForResponse maximum time to wait for R, in milliseconds
     * @return the trimmed output from R
     */
    public static String evaluateRExpression(String rExpression,
                                             Map<String, double[]> vectorVariableValues,
                                             Map<String, double[][]> matrixVariableValues,
                                             String[] dependentPackageNames,
                                             int maxMillisToWaitForResponse)
    {
        return evaluateRExpression(rExpression, null, vectorVariableValues, matrixVariableValues,
                dependentPackageNames, maxMillisToWaitForResponse);
    }
/**
* R doesn't like backslashes, even on Windows. So replace them with forward slashes
* @param file
* @return
*/
public static String generateRFriendlyPath(File file)
{
String filePath = file.getAbsolutePath();
filePath = filePath.replaceAll("\\\\","/");
return filePath;
}
/**
* Generic method for running an R expression and feeding the output to a file.
* Populates variable values and loads packages if necessary
* @param rExpression
* @return
*/
public static String evaluateRExpression(String rExpression,
Map<String, Object> scalarVariableValues,
Map<String, double[]> vectorVariableValues,
Map<String, double[][]> matrixVariableValues,
String[] dependentPackageNames,
int maxMillisToWaitForResponse)
{
StringBuffer rCommand = new StringBuffer();
if (dependentPackageNames != null && dependentPackageNames.length > 0)
{
for (String dependentPackageName : dependentPackageNames)
{
rCommand.append("library('" + dependentPackageName + "')\n");
}
}
if (scalarVariableValues != null && scalarVariableValues.size() > 0)
{
for (String variableName : scalarVariableValues.keySet())
{
rCommand.append(variableName + "<-" + scalarVariableValues.get(variableName) + "\n");
}
}
if (vectorVariableValues != null && vectorVariableValues.size() > 0)
{
for (String variableName : vectorVariableValues.keySet())
{
rCommand.append(variableName + "<-c(");
double[] thisVarValues = vectorVariableValues.get(variableName);
for (int i=0; i<thisVarValues.length; i++)
{
rCommand.append(thisVarValues[i]);
if (i < thisVarValues.length-1)
rCommand.append(",\n");
}
rCommand.append(")\n");
}
}
if (matrixVariableValues != null && matrixVariableValues.size() > 0)
{
for (String variableName : matrixVariableValues.keySet())
{
rCommand.append(variableName + "<-matrix(c(");
double[][] thisVarValues = matrixVariableValues.get(variableName);
for (int i=0; i<thisVarValues.length; i++)
{
for (int j=0; j < thisVarValues[0].length; j++)
{
rCommand.append(thisVarValues[i][j]);
//append the comma unless we're on the very last cell
if (i < thisVarValues.length-1 ||
j < thisVarValues[0].length-1)
rCommand.append(",");
//this is just formatting, for debug purposes
if (j == thisVarValues[0].length-1)
rCommand.append("\n");
}
}
rCommand.append("),nrow=" + thisVarValues.length +
",ncol="+thisVarValues[0].length + ", byrow=TRUE)\n");
}
}
rCommand.append(rExpression);
return evaluateRCommand(rCommand.toString(), maxMillisToWaitForResponse);
}
/**
* Creates a file containing the values to be loaded into the R variable with name variableName,
* and returns the R syntax necessary to load the values into the variable.
* If not successful, throws an exception
* @param variableName
* @param variableValues
* @return
*/
/* This was useful when everything was file-based. Not so much any more
protected static String populateArrayValueFromFile(String variableName, double[] variableValues,
Object caller)
throws FileNotFoundException
{
File tempVarValueFile =
TempFileManager.createTempFile("RInterface.populateArrayValueFromFile.var." + variableName,
caller);
PrintWriter pw = null;
try
{
pw = new PrintWriter(tempVarValueFile);
for (double variableValue : variableValues)
pw.println(variableValue);
}
finally
{
if (pw != null)
pw.close();
}
return variableName + "<-read.table('" + tempVarValueFile.getAbsolutePath() +"',header=FALSE)[,1]";
}
*/
}
| One more change to RInterface
| src/org/fhcrc/cpl/toolbox/RInterface.java | One more change to RInterface |
|
Java | apache-2.0 | aed99a3632521d0e232f75d0065213b90aae5733 | 0 | virajs/selenium-1,winhamwr/selenium,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1,winhamwr/selenium,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1 | /*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import static org.openqa.selenium.Ignore.Driver.ANDROID;
import static org.openqa.selenium.Ignore.Driver.CHROME;
import static org.openqa.selenium.Ignore.Driver.IE;
import static org.openqa.selenium.Ignore.Driver.IPHONE;
import static org.openqa.selenium.Ignore.Driver.OPERA;
import static org.openqa.selenium.Ignore.Driver.SELENESE;
import static org.openqa.selenium.TestWaiter.waitFor;
import static org.openqa.selenium.WaitingConditions.pageTitleToBe;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import org.openqa.selenium.environment.GlobalTestEnvironment;
/**
 * Driver-independent tests for page loading and navigation behaviour: plain
 * GETs, HTTP and meta redirects, URL fragments, unresolvable or unreachable
 * hosts, framesets, back/forward/refresh history traversal, insecure SSL
 * certificates and never-closed document.open() calls.
 *
 * Most tests assert on the page title after navigating; @Ignore annotations
 * list the drivers each test is known not to work on yet.
 */
public class PageLoadingTest extends AbstractDriverTestCase {
  public void testShouldWaitForDocumentToBeLoaded() {
    driver.get(pages.simpleTestPage);
    assertThat(driver.getTitle(), equalTo("Hello WebDriver"));
  }
  public void testShouldFollowRedirectsSentInTheHttpResponseHeaders() {
    driver.get(pages.redirectPage);
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
  }
  @Ignore(ANDROID)
  public void testShouldFollowMetaRedirects() throws Exception {
    driver.get(pages.metaRedirectPage);
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
  }
  @Ignore(SELENESE)
  public void testShouldBeAbleToGetAFragmentOnTheCurrentPage() {
    driver.get(pages.xhtmlTestPage);
    driver.get(pages.xhtmlTestPage + "#text");
    // findElement throws if the page was not kept loaded after the fragment GET
    driver.findElement(By.id("id1"));
  }
  @Ignore(SELENESE)
  public void testShouldReturnWhenGettingAUrlThatDoesNotResolve() {
    try {
      // Of course, we're up the creek if this ever does get registered
      driver.get("http://www.thisurldoesnotexist.comx/");
    } catch (IllegalStateException e) {
      if (!isIeDriverTimedOutException(e)) {
        throw e;
      }
    }
  }
  @Ignore({IPHONE, SELENESE})
  public void testShouldReturnWhenGettingAUrlThatDoesNotConnect() {
    // Here's hoping that there's nothing here. There shouldn't be
    driver.get("http://localhost:3001");
  }
  @Ignore({IE, IPHONE, SELENESE, ANDROID})
  public void testShouldBeAbleToLoadAPageWithFramesetsAndWaitUntilAllFramesAreLoaded() {
    driver.get(pages.framesetPage);
    // Each frame carries a pageNumber span identifying which page loaded into it
    driver.switchTo().frame(0);
    WebElement pageNumber = driver.findElement(By.xpath("//span[@id='pageNumber']"));
    assertThat(pageNumber.getText().trim(), equalTo("1"));
    driver.switchTo().defaultContent().switchTo().frame(1);
    pageNumber = driver.findElement(By.xpath("//span[@id='pageNumber']"));
    assertThat(pageNumber.getText().trim(), equalTo("2"));
  }
  // NOTE(review): method name typo -- "testSould" should read "testShould".
  // Left unchanged here because JUnit 3 discovers it by the "test" prefix and
  // external tooling may reference the current name.
  @Ignore({IPHONE, SELENESE})
  @NeedsFreshDriver
  public void testSouldDoNothingIfThereIsNothingToGoBackTo() {
    String originalTitle = driver.getTitle();
    driver.get(pages.formPage);
    driver.navigate().back();
    // We may have returned to the browser's home page
    assertThat(driver.getTitle(), anyOf(equalTo(originalTitle), equalTo("We Leave From Here")));
  }
  @Ignore({SELENESE, ANDROID})
  public void testShouldBeAbleToNavigateBackInTheBrowserHistory() {
    driver.get(pages.formPage);
    driver.findElement(By.id("imageButton")).submit();
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
    driver.navigate().back();
    assertThat(driver.getTitle(), equalTo("We Leave From Here"));
  }
  @Ignore(SELENESE)
  public void testShouldBeAbleToNavigateBackInTheBrowserHistoryInPresenceOfIframes() {
    driver.get(pages.xhtmlTestPage);
    driver.findElement(By.name("sameWindow")).click();
    waitFor(pageTitleToBe(driver, "This page has iframes"));
    assertThat(driver.getTitle(), equalTo("This page has iframes"));
    driver.navigate().back();
    assertThat(driver.getTitle(), equalTo("XHTML Test Page"));
  }
  @Ignore({SELENESE, ANDROID})
  public void testShouldBeAbleToNavigateForwardsInTheBrowserHistory() {
    driver.get(pages.formPage);
    driver.findElement(By.id("imageButton")).submit();
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
    driver.navigate().back();
    assertThat(driver.getTitle(), equalTo("We Leave From Here"));
    driver.navigate().forward();
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
  }
  @Ignore({IE, CHROME, SELENESE, IPHONE, OPERA, ANDROID})
  public void testShouldBeAbleToAccessPagesWithAnInsecureSslCertificate() {
    // TODO(user): Set the SSL capability to true.
    String url = GlobalTestEnvironment.get().getAppServer().whereIsSecure("simpleTest.html");
    driver.get(url);
    assertThat(driver.getTitle(), equalTo("Hello WebDriver"));
  }
  @Ignore(SELENESE)
  public void testShouldBeAbleToRefreshAPage() {
    driver.get(pages.xhtmlTestPage);
    driver.navigate().refresh();
    assertThat(driver.getTitle(), equalTo("XHTML Test Page"));
  }
  /**
   * @throws Exception If the test fails.
   * @see <a href="http://code.google.com/p/selenium/issues/detail?id=208"> Issue 208</a>
   *
   * This test often causes the subsequent test to fail, in Firefox, on Linux, so we need
   * a new driver after it.
   * @see <a href="http://code.google.com/p/selenium/issues/detail?id=2282">Issue 2282</a>
   */
  @Ignore(value = {IE, SELENESE, IPHONE, OPERA, ANDROID}, reason = "Untested user-agents")
  @NoDriverAfterTest
  @JavascriptEnabled
  public void testShouldNotHangIfDocumentOpenCallIsNeverFollowedByDocumentCloseCall()
      throws Exception {
    driver.get(pages.documentWrite);
    // If this command succeeds, then all is well.
    WebElement body = driver.findElement(By.tagName("body"));
    waitFor(WaitingConditions.elementTextToContain(body, "world"));
  }
  // Currently ignored on all drivers: documents the intended behaviour that a
  // slow-loading subresource (web bug, analytics script) must not block get().
  @Ignore
  public void testShouldNotWaitIndefinitelyIfAnExternalResourceFailsToLoad() {
    String slowPage = appServer.whereIs("slowLoadingResourcePage.html");
    long start = System.currentTimeMillis();
    driver.get(slowPage);
    System.out.println("Proceeding: " + (System.currentTimeMillis() - start));
    // We discard the element, but want a check to make sure the GET actually
    // completed.
    driver.findElement(By.id("peas"));
    long end = System.currentTimeMillis();
    // The slow loading resource on that page takes 6 seconds to return. If we
    // waited for it, our load time should be over 6 seconds.
    long duration = end - start;
    assertTrue("Took too long to load page: " + duration, duration < 5*1000);
  }
}
| java/client/test/org/openqa/selenium/PageLoadingTest.java | /*
Copyright 2007-2009 WebDriver committers
Copyright 2007-2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import static org.openqa.selenium.Ignore.Driver.ANDROID;
import static org.openqa.selenium.Ignore.Driver.CHROME;
import static org.openqa.selenium.Ignore.Driver.FIREFOX;
import static org.openqa.selenium.Ignore.Driver.IE;
import static org.openqa.selenium.Ignore.Driver.IPHONE;
import static org.openqa.selenium.Ignore.Driver.OPERA;
import static org.openqa.selenium.Ignore.Driver.SELENESE;
import static org.openqa.selenium.TestWaiter.waitFor;
import static org.openqa.selenium.WaitingConditions.pageTitleToBe;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import org.openqa.selenium.environment.GlobalTestEnvironment;
/**
 * Driver-independent tests for page loading and navigation behaviour: plain
 * GETs, HTTP and meta redirects, URL fragments, unresolvable or unreachable
 * hosts, framesets, back/forward/refresh history traversal, insecure SSL
 * certificates and never-closed document.open() calls.
 *
 * Most tests assert on the page title after navigating; @Ignore annotations
 * list the drivers each test is known not to work on yet.
 */
public class PageLoadingTest extends AbstractDriverTestCase {
  public void testShouldWaitForDocumentToBeLoaded() {
    driver.get(pages.simpleTestPage);
    assertThat(driver.getTitle(), equalTo("Hello WebDriver"));
  }
  public void testShouldFollowRedirectsSentInTheHttpResponseHeaders() {
    driver.get(pages.redirectPage);
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
  }
  @Ignore(ANDROID)
  public void testShouldFollowMetaRedirects() throws Exception {
    driver.get(pages.metaRedirectPage);
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
  }
  @Ignore(SELENESE)
  public void testShouldBeAbleToGetAFragmentOnTheCurrentPage() {
    driver.get(pages.xhtmlTestPage);
    driver.get(pages.xhtmlTestPage + "#text");
    // findElement throws if the page was not kept loaded after the fragment GET
    driver.findElement(By.id("id1"));
  }
  @Ignore(SELENESE)
  public void testShouldReturnWhenGettingAUrlThatDoesNotResolve() {
    try {
      // Of course, we're up the creek if this ever does get registered
      driver.get("http://www.thisurldoesnotexist.comx/");
    } catch (IllegalStateException e) {
      if (!isIeDriverTimedOutException(e)) {
        throw e;
      }
    }
  }
  @Ignore({IPHONE, SELENESE})
  public void testShouldReturnWhenGettingAUrlThatDoesNotConnect() {
    // Here's hoping that there's nothing here. There shouldn't be
    driver.get("http://localhost:3001");
  }
  @Ignore({IE, IPHONE, SELENESE, ANDROID})
  public void testShouldBeAbleToLoadAPageWithFramesetsAndWaitUntilAllFramesAreLoaded() {
    driver.get(pages.framesetPage);
    // Each frame carries a pageNumber span identifying which page loaded into it
    driver.switchTo().frame(0);
    WebElement pageNumber = driver.findElement(By.xpath("//span[@id='pageNumber']"));
    assertThat(pageNumber.getText().trim(), equalTo("1"));
    driver.switchTo().defaultContent().switchTo().frame(1);
    pageNumber = driver.findElement(By.xpath("//span[@id='pageNumber']"));
    assertThat(pageNumber.getText().trim(), equalTo("2"));
  }
  // NOTE(review): method name typo -- "testSould" should read "testShould".
  // Left unchanged here because JUnit 3 discovers it by the "test" prefix and
  // external tooling may reference the current name.
  @Ignore({IPHONE, SELENESE})
  @NeedsFreshDriver
  public void testSouldDoNothingIfThereIsNothingToGoBackTo() {
    String originalTitle = driver.getTitle();
    driver.get(pages.formPage);
    driver.navigate().back();
    // We may have returned to the browser's home page
    assertThat(driver.getTitle(), anyOf(equalTo(originalTitle), equalTo("We Leave From Here")));
  }
  @Ignore({SELENESE, ANDROID})
  public void testShouldBeAbleToNavigateBackInTheBrowserHistory() {
    driver.get(pages.formPage);
    driver.findElement(By.id("imageButton")).submit();
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
    driver.navigate().back();
    assertThat(driver.getTitle(), equalTo("We Leave From Here"));
  }
  @Ignore(SELENESE)
  public void testShouldBeAbleToNavigateBackInTheBrowserHistoryInPresenceOfIframes() {
    driver.get(pages.xhtmlTestPage);
    driver.findElement(By.name("sameWindow")).click();
    waitFor(pageTitleToBe(driver, "This page has iframes"));
    assertThat(driver.getTitle(), equalTo("This page has iframes"));
    driver.navigate().back();
    assertThat(driver.getTitle(), equalTo("XHTML Test Page"));
  }
  @Ignore({SELENESE, ANDROID})
  public void testShouldBeAbleToNavigateForwardsInTheBrowserHistory() {
    driver.get(pages.formPage);
    driver.findElement(By.id("imageButton")).submit();
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
    driver.navigate().back();
    assertThat(driver.getTitle(), equalTo("We Leave From Here"));
    driver.navigate().forward();
    assertThat(driver.getTitle(), equalTo("We Arrive Here"));
  }
  @Ignore({IE, CHROME, SELENESE, IPHONE, OPERA, ANDROID})
  public void testShouldBeAbleToAccessPagesWithAnInsecureSslCertificate() {
    // TODO(user): Set the SSL capability to true.
    String url = GlobalTestEnvironment.get().getAppServer().whereIsSecure("simpleTest.html");
    driver.get(url);
    assertThat(driver.getTitle(), equalTo("Hello WebDriver"));
  }
  @Ignore(SELENESE)
  public void testShouldBeAbleToRefreshAPage() {
    driver.get(pages.xhtmlTestPage);
    driver.navigate().refresh();
    assertThat(driver.getTitle(), equalTo("XHTML Test Page"));
  }
  /**
   * @throws Exception If the test fails.
   * @see <a href="http://code.google.com/p/selenium/issues/detail?id=208"> Issue 208</a>
   *
   * This test often causes the subsequent test to fail, in Firefox, on Linux, so we need
   * a new driver after it.
   * @see <a href="http://code.google.com/p/selenium/issues/detail?id=2282">Issue 2282</a>
   */
  @Ignore(value = {IE, SELENESE, IPHONE, OPERA, ANDROID}, reason = "Untested user-agents")
  @NoDriverAfterTest
  @JavascriptEnabled
  public void testShouldNotHangIfDocumentOpenCallIsNeverFollowedByDocumentCloseCall()
      throws Exception {
    driver.get(pages.documentWrite);
    // If this command succeeds, then all is well.
    WebElement body = driver.findElement(By.tagName("body"));
    waitFor(WaitingConditions.elementTextToContain(body, "world"));
  }
}
| SimonStewart: Adding a failing test that demonstrates the problem caused by slow loading dependencies in frames. This occurs commonly when there are web-bugs and analytics scripts loading slowly
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@14132 07704840-8298-11de-bf8c-fd130f914ac9
| java/client/test/org/openqa/selenium/PageLoadingTest.java | SimonStewart: Adding a failing test that demonstrates the problem caused by slow loading dependencies in frames. This occurs commonly when there are web-bugs and analytics scripts loading slowly |
|
Java | apache-2.0 | 58c7c88853f8046d9d35060052525c6187cf825e | 0 | attila-kiss-it/querydsl,robertandrewbain/querydsl,lpandzic/querydsl,mdiazf/querydsl,tomforster/querydsl,lpandzic/querydsl,pkcool/querydsl,Log10Solutions/querydsl,gordski/querydsl,attila-kiss-it/querydsl,lpandzic/querydsl,tomforster/querydsl,mdiazf/querydsl,robertandrewbain/querydsl,mosoft521/querydsl,dharaburda/querydsl,johnktims/querydsl,gordski/querydsl,querydsl/querydsl,balazs-zsoldos/querydsl,attila-kiss-it/querydsl,balazs-zsoldos/querydsl,querydsl/querydsl,Log10Solutions/querydsl,balazs-zsoldos/querydsl,kevinleturc/querydsl,dharaburda/querydsl,kevinleturc/querydsl,querydsl/querydsl,dharaburda/querydsl,pkcool/querydsl,johnktims/querydsl,johnktims/querydsl,izeye/querydsl,gordski/querydsl,mosoft521/querydsl,kevinleturc/querydsl,lpandzic/querydsl,tomforster/querydsl,vveloso/querydsl,pkcool/querydsl,vveloso/querydsl,robertandrewbain/querydsl,izeye/querydsl,mosoft521/querydsl,izeye/querydsl,Log10Solutions/querydsl,mdiazf/querydsl,querydsl/querydsl,vveloso/querydsl | /*
* Copyright 2011, Mysema Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mysema.query.sql;
import java.sql.Connection;
import java.sql.ResultSet;
import com.mysema.query.Projectable;
import com.mysema.query.types.Expression;
import com.mysema.query.types.Path;
import com.mysema.query.types.SubQueryExpression;
import com.mysema.query.types.query.ListSubQuery;
/**
* Query interface for SQL queries
*
* @author tiwe
*
*/
public interface SQLQuery extends SQLCommonQuery<SQLQuery>, Projectable {
    /**
     * If you use forUpdate() with a backend that uses page or row locks, rows examined by the
     * query are write-locked until the end of the current transaction.
     *
     * Not supported for SQLite and CUBRID
     *
     * @return the current query with FOR UPDATE set
     */
    SQLQuery forUpdate();
    /**
     * Creates an union expression for the given subqueries
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION
     * @return union expression over the subquery results
     */
    <RT> Union<RT> union(ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION
     * @return the resulting query
     */
    <RT> SQLQuery union(Path<?> alias, ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION
     * @return union expression over the subquery results
     */
    <RT> Union<RT> union(SubQueryExpression<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION
     * @return the resulting query
     */
    <RT> SQLQuery union(Path<?> alias, SubQueryExpression<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained)
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION ALL
     * @return union expression over the subquery results
     */
    <RT> Union<RT> unionAll(ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained), bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION ALL
     * @return the resulting query
     */
    <RT> SQLQuery unionAll(Path<?> alias, ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained)
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION ALL
     * @return union expression over the subquery results
     */
    <RT> Union<RT> unionAll(SubQueryExpression<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained), bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION ALL
     * @return the resulting query
     */
    <RT> SQLQuery unionAll(Path<?> alias, SubQueryExpression<RT>... sq);
    /**
     * Clone the state of the Query for the given Connection
     *
     * @param conn connection the cloned query will execute against
     * @return a copy of this query bound to the given connection
     */
    SQLQuery clone(Connection conn);
    /**
     * Get the results as an JDBC result set
     *
     * @param args projection expressions to select
     * @return the raw JDBC ResultSet for the query
     */
    ResultSet getResults(Expression<?>... args);
}
| querydsl-sql/src/main/java/com/mysema/query/sql/SQLQuery.java | /*
* Copyright 2011, Mysema Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mysema.query.sql;
import java.sql.Connection;
import java.sql.ResultSet;
import com.mysema.query.Projectable;
import com.mysema.query.types.Expression;
import com.mysema.query.types.Path;
import com.mysema.query.types.SubQueryExpression;
import com.mysema.query.types.query.ListSubQuery;
/**
* Query interface for SQL queries
*
* @author tiwe
*
*/
public interface SQLQuery extends SQLCommonQuery<SQLQuery>, Projectable {
    /**
     * If you use forUpdate() with a backend that uses page or row locks, rows examined by the
     * query are write-locked until the end of the current transaction
     *
     * @return the current query with FOR UPDATE set
     */
    SQLQuery forUpdate();
    /**
     * Creates an union expression for the given subqueries
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION
     * @return union expression over the subquery results
     */
    <RT> Union<RT> union(ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION
     * @return the resulting query
     */
    <RT> SQLQuery union(Path<?> alias, ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION
     * @return union expression over the subquery results
     */
    <RT> Union<RT> union(SubQueryExpression<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION
     * @return the resulting query
     */
    <RT> SQLQuery union(Path<?> alias, SubQueryExpression<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained)
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION ALL
     * @return union expression over the subquery results
     */
    <RT> Union<RT> unionAll(ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained), bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION ALL
     * @return the resulting query
     */
    <RT> SQLQuery unionAll(Path<?> alias, ListSubQuery<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained)
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param sq subqueries to combine with UNION ALL
     * @return union expression over the subquery results
     */
    <RT> Union<RT> unionAll(SubQueryExpression<RT>... sq);
    /**
     * Creates an union expression for the given subqueries, using UNION ALL
     * (duplicate rows are retained), bound to the given alias
     *
     * @param <RT> type of the projection shared by the subqueries
     * @param alias alias under which the union is registered with this query
     * @param sq subqueries to combine with UNION ALL
     * @return the resulting query
     */
    <RT> SQLQuery unionAll(Path<?> alias, SubQueryExpression<RT>... sq);
    /**
     * Clone the state of the Query for the given Connection
     *
     * @param conn connection the cloned query will execute against
     * @return a copy of this query bound to the given connection
     */
    SQLQuery clone(Connection conn);
    /**
     * Get the results as an JDBC result set
     *
     * @param args projection expressions to select
     * @return the raw JDBC ResultSet for the query
     */
    ResultSet getResults(Expression<?>... args);
}
| improved javadoc
| querydsl-sql/src/main/java/com/mysema/query/sql/SQLQuery.java | improved javadoc |
|
Java | apache-2.0 | fae82611db4d5f45f5193d9f9d2967ecdcc6edef | 0 | cbmeeks/vaadin,udayinfy/vaadin,fireflyc/vaadin,oalles/vaadin,travisfw/vaadin,asashour/framework,Legioth/vaadin,asashour/framework,kironapublic/vaadin,mittop/vaadin,sitexa/vaadin,shahrzadmn/vaadin,magi42/vaadin,jdahlstrom/vaadin.react,Darsstar/framework,Darsstar/framework,udayinfy/vaadin,Legioth/vaadin,kironapublic/vaadin,peterl1084/framework,udayinfy/vaadin,bmitc/vaadin,Peppe/vaadin,Peppe/vaadin,peterl1084/framework,mstahv/framework,synes/vaadin,sitexa/vaadin,travisfw/vaadin,Flamenco/vaadin,shahrzadmn/vaadin,bmitc/vaadin,carrchang/vaadin,mstahv/framework,oalles/vaadin,Scarlethue/vaadin,Scarlethue/vaadin,sitexa/vaadin,shahrzadmn/vaadin,mstahv/framework,magi42/vaadin,travisfw/vaadin,asashour/framework,carrchang/vaadin,Peppe/vaadin,magi42/vaadin,bmitc/vaadin,Scarlethue/vaadin,udayinfy/vaadin,carrchang/vaadin,oalles/vaadin,jdahlstrom/vaadin.react,synes/vaadin,Legioth/vaadin,Flamenco/vaadin,fireflyc/vaadin,oalles/vaadin,shahrzadmn/vaadin,cbmeeks/vaadin,mittop/vaadin,carrchang/vaadin,jdahlstrom/vaadin.react,travisfw/vaadin,synes/vaadin,jdahlstrom/vaadin.react,cbmeeks/vaadin,Flamenco/vaadin,sitexa/vaadin,oalles/vaadin,Darsstar/framework,peterl1084/framework,peterl1084/framework,fireflyc/vaadin,jdahlstrom/vaadin.react,fireflyc/vaadin,travisfw/vaadin,mittop/vaadin,Legioth/vaadin,mstahv/framework,shahrzadmn/vaadin,asashour/framework,bmitc/vaadin,udayinfy/vaadin,cbmeeks/vaadin,Peppe/vaadin,Peppe/vaadin,kironapublic/vaadin,peterl1084/framework,sitexa/vaadin,synes/vaadin,Darsstar/framework,magi42/vaadin,Scarlethue/vaadin,kironapublic/vaadin,Flamenco/vaadin,Legioth/vaadin,kironapublic/vaadin,mstahv/framework,Scarlethue/vaadin,mittop/vaadin,Darsstar/framework,fireflyc/vaadin,asashour/framework,magi42/vaadin,synes/vaadin | /*
@ITMillApache2LicenseForJavaFiles@
*/
package com.vaadin.terminal.gwt.client.ui;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.MouseListener;
import com.google.gwt.user.client.ui.MouseListenerCollection;
import com.google.gwt.user.client.ui.SourcesMouseEvents;
import com.google.gwt.user.client.ui.SourcesTableEvents;
import com.google.gwt.user.client.ui.TableListener;
import com.google.gwt.user.client.ui.Widget;
import com.vaadin.terminal.gwt.client.DateTimeService;
import com.vaadin.terminal.gwt.client.LocaleService;
public class VCalendarPanel extends FlexTable implements MouseListener {
private final VDateField datefield;
private VEventButton prevYear;
private VEventButton nextYear;
private VEventButton prevMonth;
private VEventButton nextMonth;
private VTime time;
private Date minDate = null;
private Date maxDate = null;
private CalendarEntrySource entrySource;
/* Needed to identify resolution changes */
private int resolution = VDateField.RESOLUTION_YEAR;
/* Needed to identify locale changes */
private String locale = LocaleService.getDefaultLocale();
public VCalendarPanel(VDateField parent) {
datefield = parent;
setStyleName(VDateField.CLASSNAME + "-calendarpanel");
// buildCalendar(true);
addTableListener(new DateClickListener(this));
}
public VCalendarPanel(VDateField parent, Date min, Date max) {
datefield = parent;
setStyleName(VDateField.CLASSNAME + "-calendarpanel");
// buildCalendar(true);
addTableListener(new DateClickListener(this));
}
private void buildCalendar(boolean forceRedraw) {
final boolean needsMonth = datefield.getCurrentResolution() > VDateField.RESOLUTION_YEAR;
boolean needsBody = datefield.getCurrentResolution() >= VDateField.RESOLUTION_DAY;
final boolean needsTime = datefield.getCurrentResolution() >= VDateField.RESOLUTION_HOUR;
forceRedraw = prevYear == null ? true : forceRedraw;
buildCalendarHeader(forceRedraw, needsMonth);
clearCalendarBody(!needsBody);
if (needsBody) {
buildCalendarBody();
}
if (needsTime) {
buildTime(forceRedraw);
} else if (time != null) {
remove(time);
time = null;
}
}
private void clearCalendarBody(boolean remove) {
if (!remove) {
for (int row = 2; row < 8; row++) {
for (int col = 0; col < 7; col++) {
setHTML(row, col, " ");
}
}
} else if (getRowCount() > 2) {
while (getRowCount() > 2) {
removeRow(2);
}
}
}
private void buildCalendarHeader(boolean forceRedraw, boolean needsMonth) {
if (forceRedraw) {
if (prevMonth == null) { // Only do once
prevYear = new VEventButton();
prevYear.setHTML("«");
prevYear.setStyleName("v-button-prevyear");
nextYear = new VEventButton();
nextYear.setHTML("»");
nextYear.setStyleName("v-button-nextyear");
prevYear.addMouseListener(this);
nextYear.addMouseListener(this);
setWidget(0, 0, prevYear);
setWidget(0, 4, nextYear);
if (needsMonth) {
prevMonth = new VEventButton();
prevMonth.setHTML("‹");
prevMonth.setStyleName("v-button-prevmonth");
nextMonth = new VEventButton();
nextMonth.setHTML("›");
nextMonth.setStyleName("v-button-nextmonth");
prevMonth.addMouseListener(this);
nextMonth.addMouseListener(this);
setWidget(0, 3, nextMonth);
setWidget(0, 1, prevMonth);
}
getFlexCellFormatter().setColSpan(0, 2, 3);
getRowFormatter().addStyleName(0,
VDateField.CLASSNAME + "-calendarpanel-header");
} else if (!needsMonth) {
// Remove month traverse buttons
prevMonth.removeMouseListener(this);
nextMonth.removeMouseListener(this);
remove(prevMonth);
remove(nextMonth);
prevMonth = null;
nextMonth = null;
}
// Print weekday names
final int firstDay = datefield.getDateTimeService()
.getFirstDayOfWeek();
for (int i = 0; i < 7; i++) {
int day = i + firstDay;
if (day > 6) {
day = 0;
}
if (datefield.getCurrentResolution() > VDateField.RESOLUTION_MONTH) {
setHTML(1, i, "<strong>"
+ datefield.getDateTimeService().getShortDay(day)
+ "</strong>");
} else {
setHTML(1, i, "");
}
}
}
final String monthName = needsMonth ? datefield.getDateTimeService()
.getMonth(datefield.getShowingDate().getMonth()) : "";
final int year = datefield.getShowingDate().getYear() + 1900;
setHTML(0, 2, "<span class=\"" + VDateField.CLASSNAME
+ "-calendarpanel-month\">" + monthName + " " + year
+ "</span>");
}
private void buildCalendarBody() {
// date actually selected?
Date currentDate = datefield.getCurrentDate();
Date showing = datefield.getShowingDate();
boolean selected = (currentDate != null
&& currentDate.getMonth() == showing.getMonth() && currentDate
.getYear() == showing.getYear());
final int startWeekDay = datefield.getDateTimeService()
.getStartWeekDay(datefield.getShowingDate());
final int numDays = DateTimeService.getNumberOfDaysInMonth(datefield
.getShowingDate());
int dayCount = 0;
final Date today = new Date();
final Date curr = new Date(datefield.getShowingDate().getTime());
for (int row = 2; row < 8; row++) {
for (int col = 0; col < 7; col++) {
if (!(row == 2 && col < startWeekDay)) {
if (dayCount < numDays) {
final int selectedDate = ++dayCount;
String title = "";
if (entrySource != null) {
curr.setDate(dayCount);
final List entries = entrySource.getEntries(curr,
VDateField.RESOLUTION_DAY);
if (entries != null) {
for (final Iterator it = entries.iterator(); it
.hasNext();) {
final CalendarEntry entry = (CalendarEntry) it
.next();
title += (title.length() > 0 ? ", " : "")
+ entry.getStringForDate(curr);
}
}
}
final String baseclass = VDateField.CLASSNAME
+ "-calendarpanel-day";
String cssClass = baseclass;
if (!isEnabledDate(curr)) {
cssClass += " " + baseclass + "-disabled";
}
if (selected
&& datefield.getShowingDate().getDate() == dayCount) {
cssClass += " " + baseclass + "-selected";
}
if (today.getDate() == dayCount
&& today.getMonth() == datefield
.getShowingDate().getMonth()
&& today.getYear() == datefield
.getShowingDate().getYear()) {
cssClass += " " + baseclass + "-today";
}
if (title.length() > 0) {
cssClass += " " + baseclass + "-entry";
}
setHTML(row, col, "<span title=\"" + title
+ "\" class=\"" + cssClass + "\">"
+ selectedDate + "</span>");
} else {
break;
}
}
}
}
}
private void buildTime(boolean forceRedraw) {
if (time == null) {
time = new VTime(datefield);
setText(8, 0, ""); // Add new row
getFlexCellFormatter().setColSpan(8, 0, 7);
setWidget(8, 0, time);
}
time.updateTime(forceRedraw);
}
/**
*
* @param forceRedraw
* Build all from scratch, in case of e.g. locale changes
*/
public void updateCalendar() {
// Locale and resolution changes force a complete redraw
buildCalendar(locale != datefield.getCurrentLocale()
|| resolution != datefield.getCurrentResolution());
if (datefield instanceof VTextualDate) {
((VTextualDate) datefield).buildDate();
}
locale = datefield.getCurrentLocale();
resolution = datefield.getCurrentResolution();
}
private boolean isEnabledDate(Date date) {
if ((minDate != null && date.before(minDate))
|| (maxDate != null && date.after(maxDate))) {
return false;
}
return true;
}
private void processClickEvent(Widget sender, boolean updateVariable) {
if (!datefield.isEnabled() || datefield.isReadonly()) {
return;
}
Date showingDate = datefield.getShowingDate();
if (!updateVariable) {
if (sender == prevYear) {
showingDate.setYear(showingDate.getYear() - 1);
updateCalendar();
} else if (sender == nextYear) {
showingDate.setYear(showingDate.getYear() + 1);
updateCalendar();
} else if (sender == prevMonth) {
int currentMonth = showingDate.getMonth();
showingDate.setMonth(currentMonth - 1);
/*
* If the selected date was e.g. 31.12 the new date would be
* 31.11 but this date is invalid so the new date will be 1.12.
* This is taken care of by decreasing the date until we have
* the correct month.
*/
while (showingDate.getMonth() == currentMonth) {
showingDate.setDate(showingDate.getDate() - 1);
}
updateCalendar();
} else if (sender == nextMonth) {
int currentMonth = showingDate.getMonth();
showingDate.setMonth(currentMonth + 1);
int requestedMonth = (currentMonth + 1) % 12;
/*
* If the selected date was e.g. 31.3 the new date would be 31.4
* but this date is invalid so the new date will be 1.5. This is
* taken care of by decreasing the date until we have the
* correct month.
*/
while (showingDate.getMonth() != requestedMonth) {
showingDate.setDate(showingDate.getDate() - 1);
}
updateCalendar();
}
} else {
if (datefield.getCurrentResolution() == VDateField.RESOLUTION_YEAR
|| datefield.getCurrentResolution() == VDateField.RESOLUTION_MONTH) {
// Due to current UI, update variable if res=year/month
datefield.setCurrentDate(new Date(showingDate.getTime()));
if (datefield.getCurrentResolution() == VDateField.RESOLUTION_MONTH) {
datefield.getClient().updateVariable(datefield.getId(),
"month", datefield.getCurrentDate().getMonth() + 1,
false);
}
datefield.getClient().updateVariable(datefield.getId(), "year",
datefield.getCurrentDate().getYear() + 1900,
datefield.isImmediate());
/* Must update the value in the textfield also */
updateCalendar();
}
}
}
private Timer timer;
public void onMouseDown(final Widget sender, int x, int y) {
// Allow user to click-n-hold for fast-forward or fast-rewind.
// Timer is first used for a 500ms delay after mousedown. After that has
// elapsed, another timer is triggered to go off every 150ms. Both
// timers are cancelled on mouseup or mouseout.
if (sender instanceof VEventButton) {
processClickEvent(sender, false);
timer = new Timer() {
@Override
public void run() {
timer = new Timer() {
@Override
public void run() {
processClickEvent(sender, false);
}
};
timer.scheduleRepeating(150);
}
};
timer.schedule(500);
}
}
public void onMouseEnter(Widget sender) {
}
public void onMouseLeave(Widget sender) {
if (timer != null) {
timer.cancel();
}
}
public void onMouseMove(Widget sender, int x, int y) {
}
public void onMouseUp(Widget sender, int x, int y) {
if (timer != null) {
timer.cancel();
}
processClickEvent(sender, true);
}
private class VEventButton extends VButton implements SourcesMouseEvents {
private MouseListenerCollection mouseListeners;
public VEventButton() {
super();
sinkEvents(Event.FOCUSEVENTS | Event.KEYEVENTS | Event.ONCLICK
| Event.MOUSEEVENTS);
}
public void addMouseListener(MouseListener listener) {
if (mouseListeners == null) {
mouseListeners = new MouseListenerCollection();
}
mouseListeners.add(listener);
}
public void removeMouseListener(MouseListener listener) {
if (mouseListeners != null) {
mouseListeners.remove(listener);
}
}
@Override
public void onBrowserEvent(Event event) {
super.onBrowserEvent(event);
switch (DOM.eventGetType(event)) {
case Event.ONMOUSEDOWN:
case Event.ONMOUSEUP:
case Event.ONMOUSEMOVE:
case Event.ONMOUSEOVER:
case Event.ONMOUSEOUT:
if (mouseListeners != null) {
mouseListeners.fireMouseEvent(this, event);
}
break;
}
}
}
private class DateClickListener implements TableListener {
private final VCalendarPanel cal;
public DateClickListener(VCalendarPanel panel) {
cal = panel;
}
public void onCellClicked(SourcesTableEvents sender, int row, int col) {
if (sender != cal || row < 2 || row > 7
|| !cal.datefield.isEnabled() || cal.datefield.isReadonly()) {
return;
}
final String text = cal.getText(row, col);
if (text.equals(" ")) {
return;
}
try {
final Integer day = new Integer(text);
final Date newDate = cal.datefield.getShowingDate();
newDate.setDate(day.intValue());
if (!isEnabledDate(newDate)) {
return;
}
if (cal.datefield.getCurrentDate() == null) {
cal.datefield.setCurrentDate(new Date(newDate.getTime()));
// Init variables with current time
datefield.getClient().updateVariable(cal.datefield.getId(),
"hour", newDate.getHours(), false);
datefield.getClient().updateVariable(cal.datefield.getId(),
"min", newDate.getMinutes(), false);
datefield.getClient().updateVariable(cal.datefield.getId(),
"sec", newDate.getSeconds(), false);
datefield.getClient().updateVariable(cal.datefield.getId(),
"msec", datefield.getMilliseconds(), false);
}
cal.datefield.getCurrentDate().setTime(newDate.getTime());
cal.datefield.getClient().updateVariable(cal.datefield.getId(),
"day", cal.datefield.getCurrentDate().getDate(), false);
cal.datefield.getClient().updateVariable(cal.datefield.getId(),
"month", cal.datefield.getCurrentDate().getMonth() + 1,
false);
cal.datefield.getClient().updateVariable(cal.datefield.getId(),
"year",
cal.datefield.getCurrentDate().getYear() + 1900,
cal.datefield.isImmediate());
if (datefield instanceof VTextualDate
&& resolution < VDateField.RESOLUTION_HOUR) {
((VToolkitOverlay) getParent()).hide();
} else {
updateCalendar();
}
} catch (final NumberFormatException e) {
// Not a number, ignore and stop here
return;
}
}
}
public void setLimits(Date min, Date max) {
if (min != null) {
final Date d = new Date(min.getTime());
d.setHours(0);
d.setMinutes(0);
d.setSeconds(1);
minDate = d;
} else {
minDate = null;
}
if (max != null) {
final Date d = new Date(max.getTime());
d.setHours(24);
d.setMinutes(59);
d.setSeconds(59);
maxDate = d;
} else {
maxDate = null;
}
}
public void setCalendarEntrySource(CalendarEntrySource entrySource) {
this.entrySource = entrySource;
}
public CalendarEntrySource getCalendarEntrySource() {
return entrySource;
}
public interface CalendarEntrySource {
public List getEntries(Date date, int resolution);
}
/**
* Sets focus to Calendar panel.
*
* @param focus
*/
public void setFocus(boolean focus) {
nextYear.setFocus(focus);
}
}
| src/com/vaadin/terminal/gwt/client/ui/VCalendarPanel.java | /*
@ITMillApache2LicenseForJavaFiles@
*/
package com.vaadin.terminal.gwt.client.ui;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.MouseListener;
import com.google.gwt.user.client.ui.MouseListenerCollection;
import com.google.gwt.user.client.ui.SourcesMouseEvents;
import com.google.gwt.user.client.ui.SourcesTableEvents;
import com.google.gwt.user.client.ui.TableListener;
import com.google.gwt.user.client.ui.Widget;
import com.vaadin.terminal.gwt.client.DateTimeService;
import com.vaadin.terminal.gwt.client.LocaleService;
public class VCalendarPanel extends FlexTable implements MouseListener {
private final VDateField datefield;
private VEventButton prevYear;
private VEventButton nextYear;
private VEventButton prevMonth;
private VEventButton nextMonth;
private VTime time;
private Date minDate = null;
private Date maxDate = null;
private CalendarEntrySource entrySource;
/* Needed to identify resolution changes */
private int resolution = VDateField.RESOLUTION_YEAR;
/* Needed to identify locale changes */
private String locale = LocaleService.getDefaultLocale();
public VCalendarPanel(VDateField parent) {
datefield = parent;
setStyleName(VDateField.CLASSNAME + "-calendarpanel");
// buildCalendar(true);
addTableListener(new DateClickListener(this));
}
public VCalendarPanel(VDateField parent, Date min, Date max) {
datefield = parent;
setStyleName(VDateField.CLASSNAME + "-calendarpanel");
// buildCalendar(true);
addTableListener(new DateClickListener(this));
}
private void buildCalendar(boolean forceRedraw) {
final boolean needsMonth = datefield.getCurrentResolution() > VDateField.RESOLUTION_YEAR;
boolean needsBody = datefield.getCurrentResolution() >= VDateField.RESOLUTION_DAY;
final boolean needsTime = datefield.getCurrentResolution() >= VDateField.RESOLUTION_HOUR;
forceRedraw = prevYear == null ? true : forceRedraw;
buildCalendarHeader(forceRedraw, needsMonth);
clearCalendarBody(!needsBody);
if (needsBody) {
buildCalendarBody();
}
if (needsTime) {
buildTime(forceRedraw);
} else if (time != null) {
remove(time);
time = null;
}
}
private void clearCalendarBody(boolean remove) {
if (!remove) {
for (int row = 2; row < 8; row++) {
for (int col = 0; col < 7; col++) {
setHTML(row, col, " ");
}
}
} else if (getRowCount() > 2) {
while (getRowCount() > 2) {
removeRow(2);
}
}
}
private void buildCalendarHeader(boolean forceRedraw, boolean needsMonth) {
if (forceRedraw) {
if (prevMonth == null) { // Only do once
prevYear = new VEventButton();
prevYear.setHTML("«");
prevYear.setStyleName("v-button-prevyear");
nextYear = new VEventButton();
nextYear.setHTML("»");
nextYear.setStyleName("v-button-nextyear");
prevYear.addMouseListener(this);
nextYear.addMouseListener(this);
setWidget(0, 0, prevYear);
setWidget(0, 4, nextYear);
if (needsMonth) {
prevMonth = new VEventButton();
prevMonth.setHTML("‹");
prevMonth.setStyleName("v-button-prevmonth");
nextMonth = new VEventButton();
nextMonth.setHTML("›");
nextMonth.setStyleName("v-button-nextmonth");
prevMonth.addMouseListener(this);
nextMonth.addMouseListener(this);
setWidget(0, 3, nextMonth);
setWidget(0, 1, prevMonth);
}
getFlexCellFormatter().setColSpan(0, 2, 3);
getRowFormatter().addStyleName(0,
VDateField.CLASSNAME + "-calendarpanel-header");
} else if (!needsMonth) {
// Remove month traverse buttons
prevMonth.removeMouseListener(this);
nextMonth.removeMouseListener(this);
remove(prevMonth);
remove(nextMonth);
prevMonth = null;
nextMonth = null;
}
// Print weekday names
final int firstDay = datefield.getDateTimeService()
.getFirstDayOfWeek();
for (int i = 0; i < 7; i++) {
int day = i + firstDay;
if (day > 6) {
day = 0;
}
if (datefield.getCurrentResolution() > VDateField.RESOLUTION_MONTH) {
setHTML(1, i, "<strong>"
+ datefield.getDateTimeService().getShortDay(day)
+ "</strong>");
} else {
setHTML(1, i, "");
}
}
}
final String monthName = needsMonth ? datefield.getDateTimeService()
.getMonth(datefield.getShowingDate().getMonth()) : "";
final int year = datefield.getShowingDate().getYear() + 1900;
setHTML(0, 2, "<span class=\"" + VDateField.CLASSNAME
+ "-calendarpanel-month\">" + monthName + " " + year
+ "</span>");
}
private void buildCalendarBody() {
// date actually selected?
Date currentDate = datefield.getCurrentDate();
Date showing = datefield.getShowingDate();
boolean selected = (currentDate != null
&& currentDate.getMonth() == showing.getMonth() && currentDate
.getYear() == showing.getYear());
final int startWeekDay = datefield.getDateTimeService()
.getStartWeekDay(datefield.getShowingDate());
final int numDays = DateTimeService.getNumberOfDaysInMonth(datefield
.getShowingDate());
int dayCount = 0;
final Date today = new Date();
final Date curr = new Date(datefield.getShowingDate().getTime());
for (int row = 2; row < 8; row++) {
for (int col = 0; col < 7; col++) {
if (!(row == 2 && col < startWeekDay)) {
if (dayCount < numDays) {
final int selectedDate = ++dayCount;
String title = "";
if (entrySource != null) {
curr.setDate(dayCount);
final List entries = entrySource.getEntries(curr,
VDateField.RESOLUTION_DAY);
if (entries != null) {
for (final Iterator it = entries.iterator(); it
.hasNext();) {
final CalendarEntry entry = (CalendarEntry) it
.next();
title += (title.length() > 0 ? ", " : "")
+ entry.getStringForDate(curr);
}
}
}
final String baseclass = VDateField.CLASSNAME
+ "-calendarpanel-day";
String cssClass = baseclass;
if (!isEnabledDate(curr)) {
cssClass += " " + baseclass + "-disabled";
}
if (selected
&& datefield.getShowingDate().getDate() == dayCount) {
cssClass += " " + baseclass + "-selected";
}
if (today.getDate() == dayCount
&& today.getMonth() == datefield
.getShowingDate().getMonth()
&& today.getYear() == datefield
.getShowingDate().getYear()) {
cssClass += " " + baseclass + "-today";
}
if (title.length() > 0) {
cssClass += " " + baseclass + "-entry";
}
setHTML(row, col, "<span title=\"" + title
+ "\" class=\"" + cssClass + "\">"
+ selectedDate + "</span>");
} else {
break;
}
}
}
}
}
private void buildTime(boolean forceRedraw) {
if (time == null) {
time = new VTime(datefield);
setText(8, 0, ""); // Add new row
getFlexCellFormatter().setColSpan(8, 0, 7);
setWidget(8, 0, time);
}
time.updateTime(forceRedraw);
}
/**
*
* @param forceRedraw
* Build all from scratch, in case of e.g. locale changes
*/
public void updateCalendar() {
// Locale and resolution changes force a complete redraw
buildCalendar(locale != datefield.getCurrentLocale()
|| resolution != datefield.getCurrentResolution());
if (datefield instanceof VTextualDate) {
((VTextualDate) datefield).buildDate();
}
locale = datefield.getCurrentLocale();
resolution = datefield.getCurrentResolution();
}
private boolean isEnabledDate(Date date) {
if ((minDate != null && date.before(minDate))
|| (maxDate != null && date.after(maxDate))) {
return false;
}
return true;
}
private void processClickEvent(Widget sender, boolean updateVariable) {
if (!datefield.isEnabled() || datefield.isReadonly()) {
return;
}
Date showingDate = datefield.getShowingDate();
if (!updateVariable) {
if (sender == prevYear) {
showingDate.setYear(showingDate.getYear() - 1);
updateCalendar();
} else if (sender == nextYear) {
showingDate.setYear(showingDate.getYear() + 1);
updateCalendar();
} else if (sender == prevMonth) {
int currentMonth = showingDate.getMonth();
showingDate.setMonth(currentMonth - 1);
/*
* If the selected date was e.g. 31.12 the new date would be
* 31.11 but this date is invalid so the new date will be 1.12.
* This is taken care of by decreasing the date until we have
* the correct month.
*/
while (showingDate.getMonth() == currentMonth) {
showingDate.setDate(showingDate.getDate() - 1);
}
updateCalendar();
} else if (sender == nextMonth) {
int currentMonth = showingDate.getMonth();
showingDate.setMonth(currentMonth + 1);
int requestedMonth = (currentMonth + 1) % 12;
/*
* If the selected date was e.g. 31.3 the new date would be 31.4
* but this date is invalid so the new date will be 1.5. This is
* taken care of by decreasing the date until we have the
* correct month.
*/
while (showingDate.getMonth() != requestedMonth) {
showingDate.setDate(showingDate.getDate() - 1);
}
updateCalendar();
}
} else {
if (datefield.getCurrentResolution() == VDateField.RESOLUTION_YEAR
|| datefield.getCurrentResolution() == VDateField.RESOLUTION_MONTH) {
// Due to current UI, update variable if res=year/month
datefield.setCurrentDate(new Date(showingDate.getTime()));
if (datefield.getCurrentResolution() == VDateField.RESOLUTION_MONTH) {
datefield.getClient().updateVariable(datefield.getId(),
"month", datefield.getCurrentDate().getMonth() + 1,
false);
}
datefield.getClient().updateVariable(datefield.getId(), "year",
datefield.getCurrentDate().getYear() + 1900,
datefield.isImmediate());
/* Must update the value in the textfield also */
updateCalendar();
}
}
}
private Timer timer;
public void onMouseDown(final Widget sender, int x, int y) {
// Allow user to click-n-hold for fast-forward or fast-rewind.
// Timer is first used for a 500ms delay after mousedown. After that has
// elapsed, another timer is triggered to go off every 150ms. Both
// timers are cancelled on mouseup or mouseout.
if (sender instanceof VEventButton) {
processClickEvent(sender, false);
timer = new Timer() {
@Override
public void run() {
timer = new Timer() {
@Override
public void run() {
processClickEvent(sender, false);
}
};
timer.scheduleRepeating(150);
}
};
timer.schedule(500);
}
}
public void onMouseEnter(Widget sender) {
}
public void onMouseLeave(Widget sender) {
if (timer != null) {
timer.cancel();
}
}
public void onMouseMove(Widget sender, int x, int y) {
}
public void onMouseUp(Widget sender, int x, int y) {
if (timer != null) {
timer.cancel();
}
processClickEvent(sender, true);
}
private class VEventButton extends VButton implements SourcesMouseEvents {
private MouseListenerCollection mouseListeners;
public VEventButton() {
super();
sinkEvents(Event.FOCUSEVENTS | Event.KEYEVENTS | Event.ONCLICK
| Event.MOUSEEVENTS);
}
@Override
public void addMouseListener(MouseListener listener) {
if (mouseListeners == null) {
mouseListeners = new MouseListenerCollection();
}
mouseListeners.add(listener);
}
@Override
public void removeMouseListener(MouseListener listener) {
if (mouseListeners != null) {
mouseListeners.remove(listener);
}
}
@Override
public void onBrowserEvent(Event event) {
super.onBrowserEvent(event);
switch (DOM.eventGetType(event)) {
case Event.ONMOUSEDOWN:
case Event.ONMOUSEUP:
case Event.ONMOUSEMOVE:
case Event.ONMOUSEOVER:
case Event.ONMOUSEOUT:
if (mouseListeners != null) {
mouseListeners.fireMouseEvent(this, event);
}
break;
}
}
}
private class DateClickListener implements TableListener {
private final VCalendarPanel cal;
public DateClickListener(VCalendarPanel panel) {
cal = panel;
}
public void onCellClicked(SourcesTableEvents sender, int row, int col) {
if (sender != cal || row < 2 || row > 7
|| !cal.datefield.isEnabled() || cal.datefield.isReadonly()) {
return;
}
final String text = cal.getText(row, col);
if (text.equals(" ")) {
return;
}
try {
final Integer day = new Integer(text);
final Date newDate = cal.datefield.getShowingDate();
newDate.setDate(day.intValue());
if (!isEnabledDate(newDate)) {
return;
}
if (cal.datefield.getCurrentDate() == null) {
cal.datefield.setCurrentDate(new Date(newDate.getTime()));
// Init variables with current time
datefield.getClient().updateVariable(cal.datefield.getId(),
"hour", newDate.getHours(), false);
datefield.getClient().updateVariable(cal.datefield.getId(),
"min", newDate.getMinutes(), false);
datefield.getClient().updateVariable(cal.datefield.getId(),
"sec", newDate.getSeconds(), false);
datefield.getClient().updateVariable(cal.datefield.getId(),
"msec", datefield.getMilliseconds(), false);
}
cal.datefield.getCurrentDate().setTime(newDate.getTime());
cal.datefield.getClient().updateVariable(cal.datefield.getId(),
"day", cal.datefield.getCurrentDate().getDate(), false);
cal.datefield.getClient().updateVariable(cal.datefield.getId(),
"month", cal.datefield.getCurrentDate().getMonth() + 1,
false);
cal.datefield.getClient().updateVariable(cal.datefield.getId(),
"year",
cal.datefield.getCurrentDate().getYear() + 1900,
cal.datefield.isImmediate());
if (datefield instanceof VTextualDate
&& resolution < VDateField.RESOLUTION_HOUR) {
((VToolkitOverlay) getParent()).hide();
} else {
updateCalendar();
}
} catch (final NumberFormatException e) {
// Not a number, ignore and stop here
return;
}
}
}
public void setLimits(Date min, Date max) {
if (min != null) {
final Date d = new Date(min.getTime());
d.setHours(0);
d.setMinutes(0);
d.setSeconds(1);
minDate = d;
} else {
minDate = null;
}
if (max != null) {
final Date d = new Date(max.getTime());
d.setHours(24);
d.setMinutes(59);
d.setSeconds(59);
maxDate = d;
} else {
maxDate = null;
}
}
public void setCalendarEntrySource(CalendarEntrySource entrySource) {
this.entrySource = entrySource;
}
public CalendarEntrySource getCalendarEntrySource() {
return entrySource;
}
public interface CalendarEntrySource {
public List getEntries(Date date, int resolution);
}
/**
* Sets focus to Calendar panel.
*
* @param focus
*/
public void setFocus(boolean focus) {
nextYear.setFocus(focus);
}
}
| Extra @Override annotations caused errors, removed.
svn changeset:7754/svn branch:6.0
| src/com/vaadin/terminal/gwt/client/ui/VCalendarPanel.java | Extra @Override annotations caused errors, removed. |
|
Java | apache-2.0 | 8b8512311239cfd2b086827a406de636c6f0e197 | 0 | bmaupin/android-sms-plus | /*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.mms.data;
import java.util.List;
import com.android.mms.MmsConfig;
import com.android.mms.ExceedMessageSizeException;
import com.android.mms.ResolutionException;
import com.android.mms.UnsupportContentTypeException;
import com.android.mms.model.AudioModel;
import com.android.mms.model.ImageModel;
import com.android.mms.model.MediaModel;
import com.android.mms.model.SlideModel;
import com.android.mms.model.SlideshowModel;
import com.android.mms.model.TextModel;
import com.android.mms.model.VideoModel;
import com.android.mms.transaction.MessageSender;
import com.android.mms.transaction.MmsMessageSender;
import com.android.mms.util.Recycler;
import com.android.mms.transaction.SmsMessageSender;
import com.android.mms.ui.ComposeMessageActivity;
import com.google.android.mms.ContentType;
import com.google.android.mms.MmsException;
import com.google.android.mms.pdu.EncodedStringValue;
import com.google.android.mms.pdu.PduBody;
import com.google.android.mms.pdu.PduPersister;
import com.google.android.mms.pdu.SendReq;
import com.google.android.mms.util.SqliteWrapper;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.provider.Telephony.Mms;
import android.provider.Telephony.Sms;
import android.telephony.SmsMessage;
import android.text.TextUtils;
import android.util.Log;
/**
* Contains all state related to a message being edited by the user.
*/
public class WorkingMessage {
private static final String TAG = "WorkingMessage";
private static final boolean DEBUG = false;
// Database access stuff
private final Context mContext;
private final ContentResolver mContentResolver;
// States that can require us to save or send a message as MMS.
private static final int RECIPIENTS_REQUIRE_MMS = (1 << 0); // 1
private static final int HAS_SUBJECT = (1 << 1); // 2
private static final int HAS_ATTACHMENT = (1 << 2); // 4
private static final int LENGTH_REQUIRES_MMS = (1 << 3); // 8
private static final int FORCE_MMS = (1 << 4); // 16
// A bitmap of the above indicating different properties of the message;
// any bit set will require the message to be sent via MMS.
private int mMmsState;
// Errors from setAttachment()
public static final int OK = 0;
public static final int UNKNOWN_ERROR = -1;
public static final int MESSAGE_SIZE_EXCEEDED = -2;
public static final int UNSUPPORTED_TYPE = -3;
public static final int IMAGE_TOO_LARGE = -4;
// Attachment types
public static final int TEXT = 0;
public static final int IMAGE = 1;
public static final int VIDEO = 2;
public static final int AUDIO = 3;
public static final int SLIDESHOW = 4;
// Current attachment type of the message; one of the above values.
private int mAttachmentType;
// Conversation this message is targeting.
private Conversation mConversation;
// Text of the message.
private CharSequence mText;
// Slideshow for this message, if applicable. If it's a simple attachment,
// i.e. not SLIDESHOW, it will contain only one slide.
private SlideshowModel mSlideshow;
// Data URI of an MMS message if we have had to save it.
private Uri mMessageUri;
// MMS subject line for this message
private CharSequence mSubject;
// Set to true if this message has been discarded.
private boolean mDiscarded = false;
// Our callback interface
private final MessageStatusListener mStatusListener;
private List<String> mWorkingRecipients;
/**
* Callback interface for communicating important state changes back to
* ComposeMessageActivity.
*/
public interface MessageStatusListener {
/**
* Called when the protocol for sending the message changes from SMS
* to MMS, and vice versa.
*
* @param mms If true, it changed to MMS. If false, to SMS.
*/
void onProtocolChanged(boolean mms);
/**
* Called when an attachment on the message has changed.
*/
void onAttachmentChanged();
/**
* Called once the process of sending a message, triggered by
* {@link send} has completed. This doesn't mean the send succeeded,
* just that it has been dispatched to the network.
*/
void onMessageSent();
}
private WorkingMessage(ComposeMessageActivity activity) {
mContext = activity;
mContentResolver = mContext.getContentResolver();
mStatusListener = activity;
mAttachmentType = TEXT;
mText = "";
}
/**
* Creates a new working message.
*/
public static WorkingMessage createEmpty(ComposeMessageActivity activity) {
// Make a new empty working message.
WorkingMessage msg = new WorkingMessage(activity);
return msg;
}
/**
 * Create a new WorkingMessage from the specified data URI, which typically
 * contains an MMS message.
 *
 * @return the loaded message, or null if the message could not be moved
 *         into the drafts box or loaded from the URI.
 */
public static WorkingMessage load(ComposeMessageActivity activity, Uri uri) {
// If the message is not already in the draft box, move it there.
if (!uri.toString().startsWith(Mms.Draft.CONTENT_URI.toString())) {
PduPersister persister = PduPersister.getPduPersister(activity);
if (DEBUG) debug("load: moving %s to drafts", uri);
try {
uri = persister.move(uri, Mms.Draft.CONTENT_URI);
} catch (MmsException e) {
error("Can't move %s to drafts", uri);
return null;
}
}
WorkingMessage msg = new WorkingMessage(activity);
if (msg.loadFromUri(uri)) {
return msg;
}
return null;
}
/**
 * Re-derives mAttachmentType from the current slideshow contents and
 * refreshes the HAS_ATTACHMENT state bit. Called after loading a
 * message from disk so in-memory state matches what was stored.
 */
private void correctAttachmentState() {
int slideCount = mSlideshow.size();
// If we get an empty slideshow, tear down all MMS
// state and discard the unnecessary message Uri.
if (slideCount == 0) {
mAttachmentType = TEXT;
mSlideshow = null;
asyncDelete(mMessageUri, null, null);
mMessageUri = null;
} else if (slideCount > 1) {
mAttachmentType = SLIDESHOW;
} else {
SlideModel slide = mSlideshow.get(0);
if (slide.hasImage()) {
mAttachmentType = IMAGE;
} else if (slide.hasVideo()) {
mAttachmentType = VIDEO;
} else if (slide.hasAudio()) {
mAttachmentType = AUDIO;
}
// NOTE(review): a single text-only slide leaves mAttachmentType
// unchanged here -- confirm that is intended.
}
updateState(HAS_ATTACHMENT, hasAttachment(), false);
}
/**
 * Loads MMS state (slideshow, text, attachment type) from the given
 * message URI.
 *
 * @return true on success, false if the slideshow could not be created.
 */
private boolean loadFromUri(Uri uri) {
if (DEBUG) debug("loadFromUri %s", uri);
try {
mSlideshow = SlideshowModel.createFromMessageUri(mContext, uri);
} catch (MmsException e) {
error("Couldn't load URI %s", uri);
return false;
}
mMessageUri = uri;
// Make sure all our state is as expected.
syncTextFromSlideshow();
correctAttachmentState();
return true;
}
/**
 * Returns the draft message for the specified conversation, or a fresh
 * empty message when no draft exists.
 */
public static WorkingMessage loadDraft(ComposeMessageActivity activity,
Conversation conv) {
    WorkingMessage draft = new WorkingMessage(activity);
    return draft.loadFromConversation(conv) ? draft : createEmpty(activity);
}
/**
 * Loads draft state for the given conversation into this message,
 * checking for an SMS draft first, then an MMS draft.
 *
 * @return true if a draft was found and loaded, false otherwise.
 */
private boolean loadFromConversation(Conversation conv) {
if (DEBUG) debug("loadFromConversation %s", conv);
long threadId = conv.getThreadId();
// A conversation without a thread cannot have a stored draft.
if (threadId <= 0) {
return false;
}
// Look for an SMS draft first.
mText = readDraftSmsMessage(mContext, threadId, conv);
if (!TextUtils.isEmpty(mText)) {
return true;
}
// Then look for an MMS draft.
StringBuilder sb = new StringBuilder();
Uri uri = readDraftMmsMessage(mContext, threadId, sb);
if (uri != null) {
if (loadFromUri(uri)) {
// If there was an MMS message, readDraftMmsMessage
// will put the subject in our supplied StringBuilder.
if (sb.length() > 0) {
setSubject(sb.toString());
}
return true;
}
}
return false;
}
/**
 * Sets the text of the message to the specified CharSequence.
 */
public void setText(CharSequence s) {
mText = s;
}
/**
 * Returns the current message text.
 */
public CharSequence getText() {
return mText;
}
/**
 * Returns true if the message has any text.
 * @return true if the message body is non-null and non-empty.
 */
public boolean hasText() {
return !TextUtils.isEmpty(mText);
}
/**
 * Adds an attachment to the message, replacing an old one if it existed.
 * @param type Type of this attachment, such as {@link IMAGE}
 * @param dataUri Uri containing the attachment data (or null for {@link TEXT})
 * @return An error code such as {@link UNKNOWN_ERROR} or {@link OK} if successful
 */
public int setAttachment(int type, Uri dataUri) {
if (DEBUG) debug("setAttachment type=%d uri %s", type, dataUri);
int result = OK;
// Make sure mSlideshow is set up and has a slide.
ensureSlideshow();
// Change the attachment and translate the various underlying
// exceptions into useful error codes.
try {
changeMedia(type, dataUri);
} catch (MmsException e) {
result = UNKNOWN_ERROR;
} catch (UnsupportContentTypeException e) {
result = UNSUPPORTED_TYPE;
} catch (ExceedMessageSizeException e) {
result = MESSAGE_SIZE_EXCEEDED;
} catch (ResolutionException e) {
result = IMAGE_TOO_LARGE;
}
// If we were successful, update mAttachmentType and notify
// the listener than there was a change.
if (result == OK) {
mAttachmentType = type;
mStatusListener.onAttachmentChanged();
}
// Set HAS_ATTACHMENT if we need it. Note this runs even on failure,
// since ensureSlideshow() may have created MMS state.
updateState(HAS_ATTACHMENT, hasAttachment(), true);
return result;
}
/**
 * Returns true if this message contains anything worth saving: text,
 * a subject, an attachment or slideshow, or the sticky FORCE_MMS bit.
 * FORCE_MMS is set by saveAsMms() as a sort of "invisible attachment"
 * so the message isn't thrown away while other activities hold it.
 */
public boolean isWorthSaving() {
    return hasText() || hasSubject() || hasAttachment() || hasSlideshow()
            || (mMmsState & FORCE_MMS) > 0;
}
/**
 * Makes sure mSlideshow is set up, lazily creating a slideshow with a
 * single empty slide if none exists yet.
 */
private void ensureSlideshow() {
if (mSlideshow != null) {
return;
}
SlideshowModel slideshow = SlideshowModel.createNew(mContext);
SlideModel slide = new SlideModel(slideshow);
slideshow.add(slide);
mSlideshow = slideshow;
}
/**
 * Change the message's attachment to the data in the specified Uri.
 * Used only for single-slide ("attachment mode") messages.
 *
 * @param type one of TEXT, IMAGE, VIDEO or AUDIO
 * @param uri content Uri of the new media (unused for TEXT)
 * @throws MmsException if the media cannot be loaded
 * @throws IllegalArgumentException for an unknown attachment type
 */
private void changeMedia(int type, Uri uri) throws MmsException {
SlideModel slide = mSlideshow.get(0);
MediaModel media;
// Remove any previous attachments.
slide.removeImage();
slide.removeVideo();
slide.removeAudio();
// If we're changing to text, just bail out.
if (type == TEXT) {
return;
}
// Make a correct MediaModel for the type of attachment.
if (type == IMAGE) {
media = new ImageModel(mContext, uri, mSlideshow.getLayout().getImageRegion());
} else if (type == VIDEO) {
// NOTE(review): video reuses the layout's image region -- confirm intended.
media = new VideoModel(mContext, uri, mSlideshow.getLayout().getImageRegion());
} else if (type == AUDIO) {
media = new AudioModel(mContext, uri);
} else {
throw new IllegalArgumentException("changeMedia type=" + type + ", uri=" + uri);
}
// Add it to the slide.
slide.add(media);
// For video and audio, set the duration of the slide to
// that of the attachment.
if (type == VIDEO || type == AUDIO) {
slide.updateDuration(media.getDuration());
}
}
/**
 * Whether the message carries any attachment (including slideshows).
 */
public boolean hasAttachment() {
    return mAttachmentType > TEXT;
}

/**
 * The slideshow backing this message, or null if there is none.
 */
public SlideshowModel getSlideshow() {
    return mSlideshow;
}

/**
 * Whether the attachment is a true multi-slide slideshow, as opposed to
 * a single inline attachment such as one image.
 */
public boolean hasSlideshow() {
    return mAttachmentType == SLIDESHOW;
}
/**
 * Sets the MMS subject of the message. Passing null indicates that there
 * is no subject. Passing "" will result in an empty subject being added
 * to the message, possibly triggering a conversion to MMS. This extra
 * bit of state is needed to support ComposeMessageActivity converting to
 * MMS when the user adds a subject. An empty subject will be removed
 * before saving to disk or sending, however.
 */
public void setSubject(CharSequence s) {
mSubject = s;
// HAS_SUBJECT is keyed on null-ness, not emptiness, on purpose (see above).
updateState(HAS_SUBJECT, (s != null), true);
}
/**
 * Returns the MMS subject of the message.
 */
public CharSequence getSubject() {
return mSubject;
}
/**
 * Returns true if this message has an MMS subject.
 * @return true if the subject is non-null and non-empty.
 */
public boolean hasSubject() {
return !TextUtils.isEmpty(mSubject);
}
/**
 * Moves the message text into the slideshow. Should be called any time
 * the message is about to be sent or written to disk. Only applies to
 * single-slide ("attachment mode") messages.
 */
private void syncTextToSlideshow() {
if (mSlideshow == null || mSlideshow.size() != 1)
return;
SlideModel slide = mSlideshow.get(0);
TextModel text;
if (!slide.hasText()) {
// Add a TextModel to slide 0 if one doesn't already exist
text = new TextModel(mContext, ContentType.TEXT_PLAIN, "text_0.txt",
mSlideshow.getLayout().getTextRegion());
slide.add(text);
} else {
// Otherwise just reuse the existing one.
text = slide.getText();
}
text.setText(mText);
}
/**
 * Sets the message text out of the slideshow. Should be called any time
 * a slideshow is loaded from disk. No-op for real (multi-slide)
 * slideshows or slides without text.
 */
private void syncTextFromSlideshow() {
// Don't sync text for real slideshows.
if (mSlideshow.size() != 1) {
return;
}
SlideModel slide = mSlideshow.get(0);
if (!slide.hasText()) {
return;
}
mText = slide.getText().getText();
}
/**
 * Removes the subject if it is empty, possibly converting back to SMS.
 * hasSubject() is false for both null and "", so this normalizes an
 * empty-string subject to null; setSubject(null) clears the HAS_SUBJECT
 * state bit, which can drop the message back to SMS.
 */
private void removeSubjectIfEmpty() {
if (!hasSubject()) {
setSubject(null);
}
}
/**
 * Gets internal message state ready for storage. Should be called any
 * time the message is about to be sent or written to disk.
 */
private void prepareForSave() {
// Make sure our working set of recipients is resolved
// to first-class Contact objects before we save.
syncWorkingRecipients();
if (requiresMms()) {
ensureSlideshow();
syncTextToSlideshow();
removeSubjectIfEmpty();
}
}
/**
 * Resolve the temporary working set of recipients to a ContactList and
 * install it on the conversation. No-op when there is no pending set.
 */
private void syncWorkingRecipients() {
if (mWorkingRecipients != null) {
ContactList recipients = ContactList.getByNumbers(mWorkingRecipients, false);
mConversation.setRecipients(recipients);
// Consumed; cleared so we don't re-resolve on the next save.
mWorkingRecipients = null;
}
}
/**
 * Force the message to be saved as MMS and return the Uri of the message.
 * Typically used when handing a message off to another activity.
 *
 * @return the Uri of the persisted MMS draft
 * @throws IllegalStateException if called after discard()
 */
public Uri saveAsMms() {
if (DEBUG) debug("save mConversation=%s", mConversation);
if (mDiscarded) {
throw new IllegalStateException("save() called after discard()");
}
// FORCE_MMS behaves as sort of an "invisible attachment", making
// the message seem non-empty (and thus not discarded). This bit
// is sticky until the last other MMS bit is removed, at which
// point the message will fall back to SMS.
updateState(FORCE_MMS, true, false);
// Collect our state to be written to disk.
prepareForSave();
// Make sure we are saving to the correct thread ID.
mConversation.ensureThreadId();
mConversation.setDraftState(true);
PduPersister persister = PduPersister.getPduPersister(mContext);
SendReq sendReq = makeSendReq(mConversation, mSubject);
// If we don't already have a Uri lying around, make a new one. If we do
// have one already, make sure it is synced to disk.
if (mMessageUri == null) {
mMessageUri = createDraftMmsMessage(persister, sendReq, mSlideshow);
} else {
updateDraftMmsMessage(mMessageUri, persister, mSlideshow, sendReq);
}
return mMessageUri;
}
/**
 * Save this message as a draft in the conversation previously specified
 * to {@link setConversation}. No-op if the message has been discarded.
 *
 * @throws IllegalStateException if no conversation has been set
 */
public void saveDraft() {
if (DEBUG) debug("saveDraft");
// If we have discarded the message, just bail out.
if (mDiscarded) {
return;
}
// Make sure setConversation was called.
if (mConversation == null) {
throw new IllegalStateException("saveDraft() called with no conversation");
}
// Get ready to write to disk.
prepareForSave();
// MMS drafts and SMS drafts are stored in different tables.
if (requiresMms()) {
asyncUpdateDraftMmsMessage(mConversation);
} else {
asyncUpdateDraftSmsMessage(mConversation, mText.toString());
}
// Update state of the draft cache.
mConversation.setDraftState(true);
}
/**
 * Irreversibly discards this message: asynchronously deletes its MMS
 * data (if any) and any SMS draft for the conversation, and makes
 * subsequent saveDraft() calls no-ops.
 *
 * @throws IllegalStateException if called twice (caller bug)
 */
public void discard() {
    if (DEBUG) debug("discard");
    // Technically, we could probably just bail out here. But discard() is
    // really meant to be called if you never want to use the message again,
    // so keep this check in as a debugging aid.
    if (mDiscarded) {  // was "mDiscarded == true"; compare booleans directly
        throw new IllegalStateException("discard() called twice");
    }
    // Mark this message as discarded in order to make saveDraft() no-op.
    mDiscarded = true;
    // Delete our MMS message, if there is one.
    if (mMessageUri != null) {
        asyncDelete(mMessageUri, null, null);
    }
    // Delete any draft messages associated with this conversation.
    asyncDeleteDraftSmsMessage(mConversation);
    // Update state of the draft cache.
    mConversation.setDraftState(false);
}
/**
 * Reverses a previous discard() so the message may be saved again.
 * Note: only the flag is reset; data already deleted asynchronously by
 * discard() is not restored.
 */
public void unDiscard() {
if (DEBUG) debug("unDiscard");
mDiscarded = false;
}
/**
 * Returns true if discard() has been called on this message.
 */
public boolean isDiscarded() {
return mDiscarded;
}
/**
 * To be called from our Activity's onSaveInstanceState() to give us a chance
 * to stow our state away for later retrieval. Keys mirror those read by
 * readStateFromBundle().
 *
 * @param bundle The Bundle passed in to onSaveInstanceState
 */
public void writeStateToBundle(Bundle bundle) {
if (hasSubject()) {
bundle.putString("subject", mSubject.toString());
}
// An MMS draft is referenced by Uri; otherwise stash the SMS body text.
if (mMessageUri != null) {
bundle.putParcelable("msg_uri", mMessageUri);
} else if (hasText()) {
bundle.putString("sms_body", mText.toString());
}
}
/**
 * To be called from our Activity's onCreate() if the activity manager
 * has given it a Bundle to reinflate. Keys mirror those written by
 * writeStateToBundle().
 *
 * @param bundle The Bundle passed in to onCreate (may be null)
 */
public void readStateFromBundle(Bundle bundle) {
    if (bundle == null) {
        return;
    }
    setSubject(bundle.getString("subject"));
    Uri uri = (Uri) bundle.getParcelable("msg_uri");
    if (uri != null) {
        // An MMS draft was stashed; reload all state from its URI.
        // (Removed a redundant "return" that preceded the else branch.)
        loadFromUri(uri);
    } else {
        // Restore the SMS body. Guard against a missing key so mText keeps
        // the non-null invariant established by the constructor; a null
        // here would NPE later in send()/saveDraft() via mText.toString().
        String body = bundle.getString("sms_body");
        mText = (body == null) ? "" : body;
    }
}
/**
 * Update the temporary list of recipients, used when setting up a
 * new conversation. Will be converted to a ContactList on any
 * save event (send, save draft, etc.)
 */
public void setWorkingRecipients(List<String> numbers) {
mWorkingRecipients = numbers;
}
/**
 * Set the conversation associated with this message.
 */
public void setConversation(Conversation conv) {
if (DEBUG) debug("setConversation %s -> %s", mConversation, conv);
mConversation = conv;
// Convert to MMS if there are any email addresses in the recipient list.
setHasEmail(conv.getRecipients().containsEmail());
}
/**
 * Hint whether or not this message will be delivered to an
 * email address.
 */
public void setHasEmail(boolean hasEmail) {
// When an email gateway is configured, email recipients are reachable
// without MMS, so they never force a protocol conversion.
if (MmsConfig.getEmailGateway() != null) {
updateState(RECIPIENTS_REQUIRE_MMS, false, true);
} else {
updateState(RECIPIENTS_REQUIRE_MMS, hasEmail, true);
}
}
/**
 * Returns true if this message would require MMS to send (any state
 * bit set in mMmsState).
 */
public boolean requiresMms() {
return (mMmsState > 0);
}
/**
 * Set whether or not we want to send this message via MMS in order to
 * avoid sending an excessive number of concatenated SMS messages.
 */
public void setLengthRequiresMms(boolean mmsRequired) {
updateState(LENGTH_REQUIRES_MMS, mmsRequired, true);
}
/**
 * Renders the MMS-state bitmask as a human-readable string for logging.
 *
 * @param state bitmask of the *_REQUIRE*_MMS / HAS_* / FORCE_MMS bits
 * @return "&lt;none&gt;" for 0, otherwise the set flag names joined by " | "
 */
private static String stateString(int state) {
    if (state == 0)
        return "<none>";
    StringBuilder sb = new StringBuilder();
    if ((state & RECIPIENTS_REQUIRE_MMS) > 0)
        sb.append("RECIPIENTS_REQUIRE_MMS | ");
    if ((state & HAS_SUBJECT) > 0)
        sb.append("HAS_SUBJECT | ");
    if ((state & HAS_ATTACHMENT) > 0)
        sb.append("HAS_ATTACHMENT | ");
    if ((state & LENGTH_REQUIRES_MMS) > 0)
        sb.append("LENGTH_REQUIRES_MMS | ");
    if ((state & FORCE_MMS) > 0)
        sb.append("FORCE_MMS | ");
    // Strip the trailing " | ". Guard the delete: a non-zero state made of
    // unknown bits matches none of the flags above, and the original
    // unconditional delete(len-3, len) threw StringIndexOutOfBoundsException.
    if (sb.length() >= 3) {
        sb.delete(sb.length() - 3, sb.length());
    }
    return sb.toString();
}
/**
 * Sets the current state of our various "MMS required" bits.
 *
 * @param state The bit to change, such as {@link HAS_ATTACHMENT}
 * @param on If true, set it; if false, clear it
 * @param notify Whether or not to notify the listener of protocol changes
 */
private void updateState(int state, boolean on, boolean notify) {
int oldState = mMmsState;
if (on) {
mMmsState |= state;
} else {
mMmsState &= ~state;
}
// If we are clearing the last bit that is not FORCE_MMS,
// expire the FORCE_MMS bit. This is what makes FORCE_MMS "sticky"
// only while some other MMS-requiring bit is set.
if (mMmsState == FORCE_MMS && ((oldState & ~FORCE_MMS) > 0)) {
mMmsState = 0;
}
// Notify the listener if we are moving from SMS to MMS
// or vice versa.
if (notify) {
if (oldState == 0 && mMmsState != 0) {
mStatusListener.onProtocolChanged(true);
} else if (oldState != 0 && mMmsState == 0) {
mStatusListener.onProtocolChanged(false);
}
}
if (oldState != mMmsState) {
if (DEBUG) debug("updateState: %s%s = %s", on ? "+" : "-",
stateString(state), stateString(mMmsState));
}
}
/**
 * Send this message over the network. Will call back with onMessageSent() once
 * it has been dispatched to the telephony stack. This WorkingMessage object is
 * no longer useful after this method has been called.
 */
public void send() {
if (DEBUG) debug("send");
// Get ready to write to disk.
prepareForSave();
// We need the recipient list for both SMS and MMS.
final Conversation conv = mConversation;
String msgTxt = mText.toString();
if (requiresMms() || addressContainsEmailToMms(conv, msgTxt)) {
// Make local copies of the bits we need for sending a message,
// because we will be doing it off of the main thread, which will
// immediately continue on to resetting some of this state.
final Uri mmsUri = mMessageUri;
final PduPersister persister = PduPersister.getPduPersister(mContext);
final SlideshowModel slideshow = mSlideshow;
final SendReq sendReq = makeSendReq(conv, mSubject);
// Make sure the text in slide 0 is no longer holding onto a reference to the text
// in the message text box.
slideshow.prepareForSend();
// Do the dirty work of sending the message off of the main UI thread.
new Thread(new Runnable() {
public void run() {
sendMmsWorker(conv, mmsUri, persister, slideshow, sendReq);
}
}).start();
} else {
// Same rules apply as above: capture the text before the caller
// can mutate it.
final String msgText = mText.toString();
new Thread(new Runnable() {
public void run() {
sendSmsWorker(conv, msgText);
}
}).start();
}
// Mark the message as discarded because it is "off the market" after being sent.
mDiscarded = true;
}
/**
 * When an email gateway is configured, checks whether any recipient is an
 * email address whose gateway-prefixed body ("address text") would span
 * more than one SMS segment; if so, flips the message to MMS.
 *
 * @return true if the message was converted to MMS, false otherwise.
 */
private boolean addressContainsEmailToMms(Conversation conv, String text) {
if (MmsConfig.getEmailGateway() != null) {
String[] dests = conv.getRecipients().getNumbers();
int length = dests.length;
for (int i = 0; i < length; i++) {
if (Mms.isEmailAddress(dests[i])) {
// Via the gateway, the destination address is prepended to the body.
String mtext = dests[i] + " " + text;
// params[0] is the number of SMS segments required.
int[] params = SmsMessage.calculateLength(mtext, false);
if (params[0] > 1) {
updateState(RECIPIENTS_REQUIRE_MMS, true, true);
ensureSlideshow();
syncTextToSlideshow();
return true;
}
}
}
}
return false;
}
// Message sending stuff
/**
 * Background worker that dispatches this message as SMS and then notifies
 * the status listener. Runs off the UI thread (see send()).
 */
private void sendSmsWorker(Conversation conv, String msgText) {
// Make sure we are still using the correct thread ID for our
// recipient set.
long threadId = conv.ensureThreadId();
String[] dests = conv.getRecipients().getNumbers();
MessageSender sender = new SmsMessageSender(mContext, dests, msgText, threadId);
try {
sender.sendMessage(threadId);
// Make sure this thread isn't over the limits in message count
Recycler.getSmsRecycler().deleteOldMessagesByThreadId(mContext, threadId);
} catch (Exception e) {
Log.e(TAG, "Failed to send SMS message, threadId=" + threadId, e);
}
mStatusListener.onMessageSent();
}
/**
 * Background worker that persists (or syncs) the MMS draft, dispatches it,
 * and notifies the status listener. Runs off the UI thread (see send()).
 */
private void sendMmsWorker(Conversation conv, Uri mmsUri, PduPersister persister,
SlideshowModel slideshow, SendReq sendReq) {
// Make sure we are still using the correct thread ID for our
// recipient set.
long threadId = conv.ensureThreadId();
if (DEBUG) debug("sendMmsWorker: update draft MMS message " + mmsUri);
if (mmsUri == null) {
// Create a new MMS message if one hasn't been made yet.
mmsUri = createDraftMmsMessage(persister, sendReq, slideshow);
} else {
// Otherwise, sync the MMS message in progress to disk.
updateDraftMmsMessage(mmsUri, persister, slideshow, sendReq);
}
// Be paranoid and clean any draft SMS up.
deleteDraftSmsMessage(threadId);
MessageSender sender = new MmsMessageSender(mContext, mmsUri);
try {
if (!sender.sendMessage(threadId)) {
// The message was sent through SMS protocol, we should
// delete the copy which was previously saved in MMS drafts.
SqliteWrapper.delete(mContext, mContentResolver, mmsUri, null, null);
}
// Make sure this thread isn't over the limits in message count
Recycler.getMmsRecycler().deleteOldMessagesByThreadId(mContext, threadId);
} catch (Exception e) {
Log.e(TAG, "Failed to send message: " + mmsUri + ", threadId=" + threadId, e);
}
mStatusListener.onMessageSent();
}
// Draft message stuff
// Projection and matching column indices for querying MMS drafts.
private static final String[] MMS_DRAFT_PROJECTION = {
Mms._ID, // 0
Mms.SUBJECT // 1
};
private static final int MMS_ID_INDEX = 0;
private static final int MMS_SUBJECT_INDEX = 1;
/**
 * Looks up the MMS draft for the given thread.
 *
 * @param sb receives the draft's subject, if any
 * @return the draft's Uri, or null when no draft exists (or the query failed)
 */
private static Uri readDraftMmsMessage(Context context, long threadId, StringBuilder sb) {
    if (DEBUG) debug("readDraftMmsMessage tid=%d", threadId);
    ContentResolver cr = context.getContentResolver();
    final String selection = Mms.THREAD_ID + " = " + threadId;
    Cursor cursor = SqliteWrapper.query(context, cr,
            Mms.Draft.CONTENT_URI, MMS_DRAFT_PROJECTION,
            selection, null, null);
    // The underlying ContentResolver.query() may return null on failure;
    // the original code would NPE on cursor.moveToFirst() in that case.
    if (cursor == null) {
        return null;
    }
    try {
        if (cursor.moveToFirst()) {
            Uri uri = ContentUris.withAppendedId(Mms.Draft.CONTENT_URI,
                    cursor.getLong(MMS_ID_INDEX));
            String subject = cursor.getString(MMS_SUBJECT_INDEX);
            if (subject != null) {
                sb.append(subject);
            }
            return uri;
        }
    } finally {
        cursor.close();
    }
    return null;
}
/**
 * Builds an MMS SendReq PDU for the conversation's recipients with the
 * given subject and the current time as the date header.
 */
private static SendReq makeSendReq(Conversation conv, CharSequence subject) {
String[] dests = conv.getRecipients().getNumbers();
SendReq req = new SendReq();
EncodedStringValue[] encodedNumbers = EncodedStringValue.encodeStrings(dests);
if (encodedNumbers != null) {
req.setTo(encodedNumbers);
}
if (!TextUtils.isEmpty(subject)) {
req.setSubject(new EncodedStringValue(subject.toString()));
}
// MMS dates are in seconds, not milliseconds.
req.setDate(System.currentTimeMillis() / 1000L);
return req;
}
/**
 * Persists the slideshow as a new MMS draft.
 *
 * @return the Uri of the new draft, or null if persisting failed
 */
private static Uri createDraftMmsMessage(PduPersister persister, SendReq sendReq,
        SlideshowModel slideshow) {
    try {
        PduBody pb = slideshow.toPduBody();
        sendReq.setBody(pb);
        Uri res = persister.persist(sendReq, Mms.Draft.CONTENT_URI);
        slideshow.sync(pb);
        return res;
    } catch (MmsException e) {
        // Log the failure instead of swallowing it silently; callers only
        // see a null Uri. Matches updateDraftMmsMessage()'s logging style.
        Log.e(TAG, "createDraftMmsMessage: cannot persist message", e);
        return null;
    }
}
/**
 * Creates or syncs the MMS draft for this message on a background thread,
 * and deletes any stray SMS drafts for the conversation.
 */
private void asyncUpdateDraftMmsMessage(final Conversation conv) {
if (DEBUG) debug("asyncUpdateDraftMmsMessage conv=%s mMessageUri=%s", conv, mMessageUri);
final PduPersister persister = PduPersister.getPduPersister(mContext);
final SendReq sendReq = makeSendReq(conv, mSubject);
new Thread(new Runnable() {
public void run() {
conv.ensureThreadId();
conv.setDraftState(true);
// NOTE(review): mMessageUri is written here on a background thread
// without synchronization -- confirm callers tolerate this.
if (mMessageUri == null) {
mMessageUri = createDraftMmsMessage(persister, sendReq, mSlideshow);
} else {
updateDraftMmsMessage(mMessageUri, persister, mSlideshow, sendReq);
}
}
}).start();
// Be paranoid and delete any SMS drafts that might be lying around.
asyncDeleteDraftSmsMessage(conv);
}
/**
 * Syncs an existing MMS draft at the given Uri with the current headers
 * and slideshow parts.
 */
private static void updateDraftMmsMessage(Uri uri, PduPersister persister,
SlideshowModel slideshow, SendReq sendReq) {
if (DEBUG) debug("updateDraftMmsMessage uri=%s", uri);
persister.updateHeaders(uri, sendReq);
final PduBody pb = slideshow.toPduBody();
try {
persister.updateParts(uri, pb);
} catch (MmsException e) {
Log.e(TAG, "updateDraftMmsMessage: cannot update message " + uri);
}
slideshow.sync(pb);
}
// Selection/projection for locating SMS draft rows within a thread.
private static final String SMS_DRAFT_WHERE = Sms.TYPE + "=" + Sms.MESSAGE_TYPE_DRAFT;
private static final String[] SMS_BODY_PROJECTION = { Sms.BODY };
private static final int SMS_BODY_INDEX = 0;

/**
 * Reads a draft message for the given thread ID from the database,
 * if there is one, deletes it from the database, and returns it.
 * @return The draft message or an empty string.
 */
private static String readDraftSmsMessage(Context context, long thread_id, Conversation conv) {
    if (DEBUG) debug("readDraftSmsMessage tid=%d", thread_id);
    ContentResolver cr = context.getContentResolver();
    // If it's an invalid thread, don't bother.
    if (thread_id <= 0) {
        return "";
    }
    Uri thread_uri = ContentUris.withAppendedId(Sms.Conversations.CONTENT_URI, thread_id);
    String body = "";
    Cursor c = SqliteWrapper.query(context, cr,
            thread_uri, SMS_BODY_PROJECTION, SMS_DRAFT_WHERE, null, null);
    // The underlying ContentResolver.query() may return null on failure;
    // the original code would NPE on c.moveToFirst(). Treat a null cursor
    // the same as "no draft found".
    if (c != null) {
        try {
            if (c.moveToFirst()) {
                body = c.getString(SMS_BODY_INDEX);
            }
        } finally {
            c.close();
        }
    }
    // Clean out drafts for this thread -- if the recipient set changes,
    // we will lose track of the original draft and be unable to delete
    // it later. The message will be re-saved if necessary upon exit of
    // the activity.
    SqliteWrapper.delete(context, cr, thread_uri, SMS_DRAFT_WHERE, null);
    // We found a draft, and if there are no messages in the conversation,
    // that means we deleted the thread, too. Must reset the thread id
    // so we'll eventually create a new thread.
    if (conv.getMessageCount() == 0) {
        if (DEBUG) debug("readDraftSmsMessage calling clearThreadId");
        conv.clearThreadId();
    }
    return body;
}
/**
 * Saves the given SMS body as the conversation's draft on a background
 * thread, establishing the thread id first.
 */
private void asyncUpdateDraftSmsMessage(final Conversation conv, final String contents) {
new Thread(new Runnable() {
public void run() {
long threadId = conv.ensureThreadId();
conv.setDraftState(true);
updateDraftSmsMessage(threadId, contents);
}
}).start();
}
/**
 * Writes an SMS draft row for the thread, or deletes any existing draft
 * when the new contents are empty. Also removes any stale MMS draft.
 */
private void updateDraftSmsMessage(long thread_id, String contents) {
if (DEBUG) debug("updateDraftSmsMessage tid=%d, contents=\"%s\"", thread_id, contents);
// If we don't have a valid thread, there's nothing to do.
if (thread_id <= 0) {
return;
}
// Don't bother saving an empty message.
if (TextUtils.isEmpty(contents)) {
// But delete the old draft message if it's there.
deleteDraftSmsMessage(thread_id);
return;
}
ContentValues values = new ContentValues(3);
values.put(Sms.THREAD_ID, thread_id);
values.put(Sms.BODY, contents);
values.put(Sms.TYPE, Sms.MESSAGE_TYPE_DRAFT);
SqliteWrapper.insert(mContext, mContentResolver, Sms.CONTENT_URI, values);
// An SMS draft supersedes any MMS draft for the same thread.
asyncDeleteDraftMmsMessage(thread_id);
}
/**
 * Deletes rows matching the given selection at uri on a background thread.
 */
private void asyncDelete(final Uri uri, final String selection, final String[] selectionArgs) {
if (DEBUG) debug("asyncDelete %s where %s", uri, selection);
new Thread(new Runnable() {
public void run() {
SqliteWrapper.delete(mContext, mContentResolver, uri, selection, selectionArgs);
}
}).start();
}
/** Asynchronously deletes any SMS draft rows for the conversation's thread. */
private void asyncDeleteDraftSmsMessage(Conversation conv) {
long threadId = conv.getThreadId();
if (threadId > 0) {
asyncDelete(ContentUris.withAppendedId(Sms.Conversations.CONTENT_URI, threadId),
SMS_DRAFT_WHERE, null);
}
}
/** Synchronously deletes any SMS draft rows for the given thread. */
private void deleteDraftSmsMessage(long threadId) {
SqliteWrapper.delete(mContext, mContentResolver,
ContentUris.withAppendedId(Sms.Conversations.CONTENT_URI, threadId),
SMS_DRAFT_WHERE, null);
}
/** Asynchronously deletes any MMS draft rows for the given thread. */
private void asyncDeleteDraftMmsMessage(long threadId) {
final String where = Mms.THREAD_ID + " = " + threadId;
asyncDelete(Mms.Draft.CONTENT_URI, where, null);
}
// Logging stuff.
/**
 * Renders a String array as "[a, b, c]" for logging; an empty array
 * renders as "[]".
 */
private static String prettyArray(String[] array) {
    StringBuilder out = new StringBuilder("[");
    for (int i = 0; i < array.length; i++) {
        if (i > 0) {
            out.append(", ");
        }
        out.append(array[i]);
    }
    return out.append("]").toString();
}
/**
 * Formats a log message, expanding any String[] arguments into readable
 * lists and prefixing the current thread id.
 */
private static String logFormat(String format, Object... args) {
    for (int i = 0; i < args.length; i++) {
        Object arg = args[i];
        if (arg instanceof String[]) {
            args[i] = prettyArray((String[]) arg);
        }
    }
    return "[" + Thread.currentThread().getId() + "] " + String.format(format, args);
}
/** Logs a formatted debug message tagged with the current thread id. */
private static void debug(String format, Object... args) {
Log.d(TAG, logFormat(format, args));
}
/** Logs a formatted warning message tagged with the current thread id. */
private static void warn(String format, Object... args) {
Log.w(TAG, logFormat(format, args));
}
/** Logs a formatted error message tagged with the current thread id. */
private static void error(String format, Object... args) {
Log.e(TAG, logFormat(format, args));
}
}
| src/com/android/mms/data/WorkingMessage.java | /*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.mms.data;
import java.util.List;
import com.android.mms.MmsConfig;
import com.android.mms.ExceedMessageSizeException;
import com.android.mms.ResolutionException;
import com.android.mms.UnsupportContentTypeException;
import com.android.mms.model.AudioModel;
import com.android.mms.model.ImageModel;
import com.android.mms.model.MediaModel;
import com.android.mms.model.SlideModel;
import com.android.mms.model.SlideshowModel;
import com.android.mms.model.TextModel;
import com.android.mms.model.VideoModel;
import com.android.mms.transaction.MessageSender;
import com.android.mms.transaction.MmsMessageSender;
import com.android.mms.util.Recycler;
import com.android.mms.transaction.SmsMessageSender;
import com.android.mms.ui.ComposeMessageActivity;
import com.google.android.mms.ContentType;
import com.google.android.mms.MmsException;
import com.google.android.mms.pdu.EncodedStringValue;
import com.google.android.mms.pdu.PduBody;
import com.google.android.mms.pdu.PduPersister;
import com.google.android.mms.pdu.SendReq;
import com.google.android.mms.util.SqliteWrapper;
import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.provider.Telephony.Mms;
import android.provider.Telephony.Sms;
import android.telephony.SmsMessage;
import android.text.TextUtils;
import android.util.Log;
/**
* Contains all state related to a message being edited by the user.
*/
public class WorkingMessage {
private static final String TAG = "WorkingMessage";
private static final boolean DEBUG = false;
// Database access stuff
private final Context mContext;
private final ContentResolver mContentResolver;
// States that can require us to save or send a message as MMS.
private static final int RECIPIENTS_REQUIRE_MMS = (1 << 0); // 1
private static final int HAS_SUBJECT = (1 << 1); // 2
private static final int HAS_ATTACHMENT = (1 << 2); // 4
private static final int LENGTH_REQUIRES_MMS = (1 << 3); // 8
private static final int FORCE_MMS = (1 << 4); // 16
// A bitmap of the above indicating different properties of the message;
// any bit set will require the message to be sent via MMS.
private int mMmsState;
// Errors from setAttachment()
public static final int OK = 0;
public static final int UNKNOWN_ERROR = -1;
public static final int MESSAGE_SIZE_EXCEEDED = -2;
public static final int UNSUPPORTED_TYPE = -3;
public static final int IMAGE_TOO_LARGE = -4;
// Attachment types
public static final int TEXT = 0;
public static final int IMAGE = 1;
public static final int VIDEO = 2;
public static final int AUDIO = 3;
public static final int SLIDESHOW = 4;
// Current attachment type of the message; one of the above values.
private int mAttachmentType;
// Conversation this message is targeting.
private Conversation mConversation;
// Text of the message.
private CharSequence mText;
// Slideshow for this message, if applicable. If it's a simple attachment,
// i.e. not SLIDESHOW, it will contain only one slide.
private SlideshowModel mSlideshow;
// Data URI of an MMS message if we have had to save it.
private Uri mMessageUri;
// MMS subject line for this message
private CharSequence mSubject;
// Set to true if this message has been discarded.
private boolean mDiscarded = false;
// Our callback interface
private final MessageStatusListener mStatusListener;
private List<String> mWorkingRecipients;
/**
 * Callback interface for communicating important state changes back to
 * ComposeMessageActivity.
 */
public interface MessageStatusListener {
/**
 * Called when the protocol for sending the message changes from SMS
 * to MMS, and vice versa.
 *
 * @param mms If true, it changed to MMS. If false, to SMS.
 */
void onProtocolChanged(boolean mms);
/**
 * Called when an attachment on the message has changed.
 */
void onAttachmentChanged();
/**
 * Called once the process of sending a message, triggered by
 * {@link WorkingMessage#send()} has completed. This doesn't mean the send
 * succeeded, just that it has been dispatched to the network.
 */
void onMessageSent();
}
private WorkingMessage(ComposeMessageActivity activity) {
mContext = activity;
mContentResolver = mContext.getContentResolver();
mStatusListener = activity;
mAttachmentType = TEXT;
mText = "";
}
/**
 * Creates a new, empty working message bound to the given activity.
 */
public static WorkingMessage createEmpty(ComposeMessageActivity activity) {
    return new WorkingMessage(activity);
}
/**
 * Create a new WorkingMessage from the specified data URI, which typically
 * contains an MMS message.
 *
 * @return the loaded message, or null if the message could not be moved
 *         into the draft box or read back from the provider
 */
public static WorkingMessage load(ComposeMessageActivity activity, Uri uri) {
    // If the message is not already in the draft box, move it there.
    if (!uri.toString().startsWith(Mms.Draft.CONTENT_URI.toString())) {
        PduPersister persister = PduPersister.getPduPersister(activity);
        if (DEBUG) debug("load: moving %s to drafts", uri);
        try {
            // move() returns the message's new URI inside the draft box.
            uri = persister.move(uri, Mms.Draft.CONTENT_URI);
        } catch (MmsException e) {
            error("Can't move %s to drafts", uri);
            return null;
        }
    }
    WorkingMessage msg = new WorkingMessage(activity);
    if (msg.loadFromUri(uri)) {
        return msg;
    }
    return null;
}
/**
 * Re-derives mAttachmentType (and related MMS state) from the current
 * slideshow contents after a load.
 */
private void correctAttachmentState() {
    final int slideCount = mSlideshow.size();
    if (slideCount == 0) {
        // An empty slideshow means no MMS state is needed at all; tear it
        // down and discard the now-unnecessary message URI.
        mAttachmentType = TEXT;
        mSlideshow = null;
        asyncDelete(mMessageUri, null, null);
        mMessageUri = null;
    } else if (slideCount > 1) {
        // More than one slide is a real slideshow.
        mAttachmentType = SLIDESHOW;
    } else {
        // Exactly one slide: classify it by the media it carries.
        // (If the slide has none of these, the type is left unchanged.)
        final SlideModel onlySlide = mSlideshow.get(0);
        if (onlySlide.hasImage()) {
            mAttachmentType = IMAGE;
        } else if (onlySlide.hasVideo()) {
            mAttachmentType = VIDEO;
        } else if (onlySlide.hasAudio()) {
            mAttachmentType = AUDIO;
        }
    }
    updateState(HAS_ATTACHMENT, hasAttachment(), false);
}
/**
 * Initializes this message's slideshow and message URI from a persisted MMS.
 *
 * @return true on success, false if the slideshow could not be created
 */
private boolean loadFromUri(Uri uri) {
    if (DEBUG) debug("loadFromUri %s", uri);
    try {
        mSlideshow = SlideshowModel.createFromMessageUri(mContext, uri);
    } catch (MmsException e) {
        error("Couldn't load URI %s", uri);
        return false;
    }
    mMessageUri = uri;
    // Make sure all our state is as expected.
    syncTextFromSlideshow();
    correctAttachmentState();
    return true;
}
/**
 * Load the draft message for the specified conversation, or a new empty
 * message if none exists.
 */
public static WorkingMessage loadDraft(ComposeMessageActivity activity,
        Conversation conv) {
    final WorkingMessage draft = new WorkingMessage(activity);
    if (!draft.loadFromConversation(conv)) {
        // No draft found for this conversation; start fresh.
        return createEmpty(activity);
    }
    return draft;
}
/**
 * Attempts to populate this message from a draft (SMS first, then MMS)
 * saved for the given conversation.
 *
 * @return true if a draft was found and loaded
 */
private boolean loadFromConversation(Conversation conv) {
    if (DEBUG) debug("loadFromConversation %s", conv);
    long threadId = conv.getThreadId();
    // A conversation without a thread cannot have a persisted draft.
    if (threadId <= 0) {
        return false;
    }
    // Look for an SMS draft first.
    mText = readDraftSmsMessage(mContext, threadId, conv);
    if (!TextUtils.isEmpty(mText)) {
        return true;
    }
    // Then look for an MMS draft.
    StringBuilder sb = new StringBuilder();
    Uri uri = readDraftMmsMessage(mContext, threadId, sb);
    if (uri != null) {
        if (loadFromUri(uri)) {
            // If there was an MMS message, readDraftMmsMessage
            // will put the subject in our supplied StringBuilder.
            if (sb.length() > 0) {
                setSubject(sb.toString());
            }
            return true;
        }
    }
    return false;
}
/**
 * Sets the text of the message to the specified CharSequence.
 *
 * @param s the new message body text
 */
public void setText(CharSequence s) {
    mText = s;
}
/**
 * Returns the current message text.
 */
public CharSequence getText() {
    return mText;
}
/**
 * Returns true if the message has any text.
 *
 * @return true unless the text is null or empty
 */
public boolean hasText() {
    return !TextUtils.isEmpty(mText);
}
/**
 * Adds an attachment to the message, replacing an old one if it existed.
 *
 * @param type Type of this attachment, such as {@link IMAGE}
 * @param dataUri Uri containing the attachment data (or null for {@link TEXT})
 * @return An error code such as {@link UNKNOWN_ERROR} or {@link OK} if successful
 */
public int setAttachment(int type, Uri dataUri) {
    if (DEBUG) debug("setAttachment type=%d uri %s", type, dataUri);
    int result = OK;
    // Make sure mSlideshow is set up and has a slide.
    ensureSlideshow();
    // Change the attachment and translate the various underlying
    // exceptions into useful error codes.
    try {
        changeMedia(type, dataUri);
    } catch (MmsException e) {
        result = UNKNOWN_ERROR;
    } catch (UnsupportContentTypeException e) {
        result = UNSUPPORTED_TYPE;
    } catch (ExceedMessageSizeException e) {
        result = MESSAGE_SIZE_EXCEEDED;
    } catch (ResolutionException e) {
        result = IMAGE_TOO_LARGE;
    }
    // If we were successful, update mAttachmentType and notify
    // the listener that there was a change. On failure the previous
    // attachment type is kept.
    if (result == OK) {
        mAttachmentType = type;
        mStatusListener.onAttachmentChanged();
    }
    // Set HAS_ATTACHMENT if we need it.
    updateState(HAS_ATTACHMENT, hasAttachment(), true);
    return result;
}
/**
 * Returns true if this message contains anything worth saving.
 */
public boolean isWorthSaving() {
    // Any user-visible content makes the message non-empty.
    if (hasText() || hasSubject() || hasAttachment() || hasSlideshow()) {
        return true;
    }
    // When saveAsMms() has been called, FORCE_MMS acts as a sort of
    // "invisible attachment" so the message isn't thrown away while it
    // is being handed off to other activities.
    return (mMmsState & FORCE_MMS) != 0;
}
/**
 * Makes sure mSlideshow is set up, creating a one-slide slideshow if
 * there is none yet.
 */
private void ensureSlideshow() {
    if (mSlideshow == null) {
        // Start with a single empty slide so attachment code always has
        // a slide 0 to work with.
        final SlideshowModel model = SlideshowModel.createNew(mContext);
        model.add(new SlideModel(model));
        mSlideshow = model;
    }
}
/**
 * Change the message's attachment to the data in the specified Uri.
 * Used only for single-slide ("attachment mode") messages.
 */
private void changeMedia(int type, Uri uri) throws MmsException {
    final SlideModel slide = mSlideshow.get(0);
    // Start from a clean slide: remove any previous attachments.
    slide.removeImage();
    slide.removeVideo();
    slide.removeAudio();
    // A plain-text message needs no media at all.
    if (type == TEXT) {
        return;
    }
    // Build the MediaModel matching the requested attachment type.
    final MediaModel media;
    switch (type) {
        case IMAGE:
            media = new ImageModel(mContext, uri, mSlideshow.getLayout().getImageRegion());
            break;
        case VIDEO:
            media = new VideoModel(mContext, uri, mSlideshow.getLayout().getImageRegion());
            break;
        case AUDIO:
            media = new AudioModel(mContext, uri);
            break;
        default:
            throw new IllegalArgumentException("changeMedia type=" + type + ", uri=" + uri);
    }
    slide.add(media);
    // Timed media dictates how long the slide is shown.
    if (type == VIDEO || type == AUDIO) {
        slide.updateDuration(media.getDuration());
    }
}
/**
 * Returns true if the message has an attachment (including slideshows).
 */
public boolean hasAttachment() {
    // Every attachment constant (IMAGE, VIDEO, AUDIO, SLIDESHOW) is > TEXT.
    return (mAttachmentType > TEXT);
}
/**
 * Returns the slideshow associated with this message, or null if there
 * is none.
 */
public SlideshowModel getSlideshow() {
    return mSlideshow;
}
/**
 * Returns true if the message has a real slideshow, as opposed to just
 * one image attachment, for example.
 */
public boolean hasSlideshow() {
    return (mAttachmentType == SLIDESHOW);
}
/**
 * Sets the MMS subject of the message. Passing null indicates that there
 * is no subject. Passing "" will result in an empty subject being added
 * to the message, possibly triggering a conversion to MMS. This extra
 * bit of state is needed to support ComposeMessageActivity converting to
 * MMS when the user adds a subject. An empty subject will be removed
 * before saving to disk or sending, however.
 */
public void setSubject(CharSequence s) {
    mSubject = s;
    // HAS_SUBJECT is driven by null-ness, not emptiness (see above).
    updateState(HAS_SUBJECT, (s != null), true);
}
/**
 * Returns the MMS subject of the message, or null if none has been set.
 */
public CharSequence getSubject() {
    return mSubject;
}
/**
 * Returns true if this message has a non-empty MMS subject.
 *
 * @return true unless the subject is null or empty
 */
public boolean hasSubject() {
    return !TextUtils.isEmpty(mSubject);
}
/**
 * Moves the message text into the slideshow. Should be called any time
 * the message is about to be sent or written to disk.
 */
private void syncTextToSlideshow() {
    // Only single-slide ("attachment mode") messages carry their text
    // in slide 0; real slideshows manage their own text.
    if (mSlideshow == null || mSlideshow.size() != 1) {
        return;
    }
    final SlideModel slide = mSlideshow.get(0);
    final TextModel text;
    if (slide.hasText()) {
        // Reuse the text part already present on the slide.
        text = slide.getText();
    } else {
        // First sync: add a TextModel to slide 0.
        text = new TextModel(mContext, ContentType.TEXT_PLAIN, "text_0.txt",
                mSlideshow.getLayout().getTextRegion());
        slide.add(text);
    }
    text.setText(mText);
}
/**
 * Sets the message text out of the slideshow. Should be called any time
 * a slideshow is loaded from disk.
 */
private void syncTextFromSlideshow() {
    // Real (multi-slide) slideshows keep their text in the slides, so
    // only a single-slide message contributes to mText.
    if (mSlideshow.size() == 1) {
        final SlideModel slide = mSlideshow.get(0);
        if (slide.hasText()) {
            mText = slide.getText().getText();
        }
    }
}
/**
 * Removes the subject if it is empty, possibly converting back to SMS.
 */
private void removeSubjectIfEmpty() {
    // hasSubject() is false for both null and "". An empty-but-non-null
    // subject exists only to keep the message in MMS mode while the user
    // is composing; setSubject(null) also clears the HAS_SUBJECT bit.
    if (!hasSubject()) {
        setSubject(null);
    }
}
/**
 * Gets internal message state ready for storage. Should be called any
 * time the message is about to be sent or written to disk.
 */
private void prepareForSave() {
    // Make sure our working set of recipients is resolved
    // to first-class Contact objects before we save.
    syncWorkingRecipients();
    if (requiresMms()) {
        // MMS content lives in the slideshow, so it must exist and hold
        // the current text; an empty subject is dropped before saving.
        ensureSlideshow();
        syncTextToSlideshow();
        removeSubjectIfEmpty();
    }
}
/**
 * Resolve the temporary working set of recipients to a ContactList.
 */
private void syncWorkingRecipients() {
    if (mWorkingRecipients != null) {
        ContactList recipients = ContactList.getByNumbers(mWorkingRecipients, false);
        mConversation.setRecipients(recipients);
        // The working list is consumed once resolved.
        mWorkingRecipients = null;
    }
}
/**
 * Force the message to be saved as MMS and return the Uri of the message.
 * Typically used when handing a message off to another activity.
 *
 * @return the draft MMS message Uri
 * @throws IllegalStateException if called after discard()
 */
public Uri saveAsMms() {
    if (DEBUG) debug("save mConversation=%s", mConversation);
    if (mDiscarded) {
        throw new IllegalStateException("save() called after discard()");
    }
    // FORCE_MMS behaves as sort of an "invisible attachment", making
    // the message seem non-empty (and thus not discarded). This bit
    // is sticky until the last other MMS bit is removed, at which
    // point the message will fall back to SMS.
    updateState(FORCE_MMS, true, false);
    // Collect our state to be written to disk.
    prepareForSave();
    // Make sure we are saving to the correct thread ID.
    mConversation.ensureThreadId();
    mConversation.setDraftState(true);
    PduPersister persister = PduPersister.getPduPersister(mContext);
    SendReq sendReq = makeSendReq(mConversation, mSubject);
    // If we don't already have a Uri lying around, make a new one. If we do
    // have one already, make sure it is synced to disk.
    if (mMessageUri == null) {
        mMessageUri = createDraftMmsMessage(persister, sendReq, mSlideshow);
    } else {
        updateDraftMmsMessage(mMessageUri, persister, mSlideshow, sendReq);
    }
    return mMessageUri;
}
/**
 * Save this message as a draft in the conversation previously specified
 * to {@link #setConversation}.
 *
 * @throws IllegalStateException if no conversation has been set
 */
public void saveDraft() {
    if (DEBUG) debug("saveDraft");
    // If we have discarded the message, just bail out.
    if (mDiscarded) {
        return;
    }
    // Make sure setConversation was called.
    if (mConversation == null) {
        throw new IllegalStateException("saveDraft() called with no conversation");
    }
    // Get ready to write to disk.
    prepareForSave();
    // MMS drafts go into the MMS provider; SMS drafts are stored as plain
    // text alongside the thread.
    if (requiresMms()) {
        asyncUpdateDraftMmsMessage(mConversation);
    } else {
        asyncUpdateDraftSmsMessage(mConversation, mText.toString());
    }
    // Update state of the draft cache.
    mConversation.setDraftState(true);
}
/**
 * Discards this message permanently: deletes any persisted MMS/SMS drafts
 * and makes subsequent {@link #saveDraft()} calls no-ops.
 *
 * @throws IllegalStateException if called twice (caller bug)
 */
public void discard() {
    if (DEBUG) debug("discard");
    // Technically, we could probably just bail out here. But discard() is
    // really meant to be called if you never want to use the message again,
    // so keep this assert in as a debugging aid.
    if (mDiscarded) {  // fixed: redundant "== true" comparison
        throw new IllegalStateException("discard() called twice");
    }
    // Mark this message as discarded in order to make saveDraft() no-op.
    mDiscarded = true;
    // Delete our MMS message, if there is one.
    if (mMessageUri != null) {
        asyncDelete(mMessageUri, null, null);
    }
    // Delete any draft messages associated with this conversation.
    asyncDeleteDraftSmsMessage(mConversation);
    // Update state of the draft cache.
    mConversation.setDraftState(false);
}
/**
 * Clears the discarded flag so the message can be saved again.
 */
public void unDiscard() {
    if (DEBUG) debug("unDiscard");
    mDiscarded = false;
}
/**
 * Returns true if discard() has been called on this message.
 */
public boolean isDiscarded() {
    return mDiscarded;
}
/**
 * To be called from our Activity's onSaveInstanceState() to give us a chance
 * to stow our state away for later retrieval.
 *
 * @param bundle The Bundle passed in to onSaveInstanceState
 */
public void writeStateToBundle(Bundle bundle) {
    if (hasSubject()) {
        bundle.putString("subject", mSubject.toString());
    }
    // An MMS draft is referenced by its provider URI; only a pure-SMS
    // message stores its body inline.
    if (mMessageUri != null) {
        bundle.putParcelable("msg_uri", mMessageUri);
    } else if (hasText()) {
        bundle.putString("sms_body", mText.toString());
    }
}
/**
 * To be called from our Activity's onCreate() if the activity manager
 * has given it a Bundle to reinflate.
 *
 * @param bundle The Bundle passed in to onCreate; may be null
 */
public void readStateFromBundle(Bundle bundle) {
    if (bundle == null) {
        return;
    }
    setSubject(bundle.getString("subject"));
    Uri uri = (Uri) bundle.getParcelable("msg_uri");
    if (uri != null) {
        // MMS state is reloaded from the provider.
        loadFromUri(uri);
    } else {
        // SMS body was stored inline (see writeStateToBundle).
        // Fixed idiom: the original had a redundant "return" inside the
        // if-branch followed by an "else" — one of the two is enough.
        mText = bundle.getString("sms_body");
    }
}
/**
 * Update the temporary list of recipients, used when setting up a
 * new conversation. Will be converted to a ContactList on any
 * save event (send, save draft, etc.)
 */
public void setWorkingRecipients(List<String> numbers) {
    mWorkingRecipients = numbers;
}
/**
 * Set the conversation associated with this message.
 */
public void setConversation(Conversation conv) {
    if (DEBUG) debug("setConversation %s -> %s", mConversation, conv);
    mConversation = conv;
    // Convert to MMS if there are any email addresses in the recipient list.
    setHasEmail(conv.getRecipients().containsEmail());
}
/**
 * Hint whether or not this message will be delivered to an
 * email address.
 */
public void setHasEmail(boolean hasEmail) {
    // When an email gateway is configured, email recipients do not force
    // MMS by themselves, so the bit is always cleared in that case.
    final boolean requireMms = (MmsConfig.getEmailGateway() == null) && hasEmail;
    updateState(RECIPIENTS_REQUIRE_MMS, requireMms, true);
}
/**
 * Returns true if this message would require MMS to send.
 */
public boolean requiresMms() {
    // Any set bit (subject, attachment, recipients, length, FORCE_MMS)
    // forces the MMS protocol.
    return (mMmsState > 0);
}
/**
 * Set whether or not we want to send this message via MMS in order to
 * avoid sending an excessive number of concatenated SMS messages.
 */
public void setLengthRequiresMms(boolean mmsRequired) {
    updateState(LENGTH_REQUIRES_MMS, mmsRequired, true);
}
/**
 * Renders an mMmsState bit mask as a human-readable string for logging,
 * e.g. "HAS_SUBJECT | FORCE_MMS".
 */
private static String stateString(int state) {
    if (state == 0)
        return "<none>";
    StringBuilder sb = new StringBuilder();
    if ((state & RECIPIENTS_REQUIRE_MMS) > 0)
        sb.append("RECIPIENTS_REQUIRE_MMS | ");
    if ((state & HAS_SUBJECT) > 0)
        sb.append("HAS_SUBJECT | ");
    if ((state & HAS_ATTACHMENT) > 0)
        sb.append("HAS_ATTACHMENT | ");
    if ((state & LENGTH_REQUIRES_MMS) > 0)
        sb.append("LENGTH_REQUIRES_MMS | ");
    if ((state & FORCE_MMS) > 0)
        sb.append("FORCE_MMS | ");
    // Fixed: a non-zero state containing only unrecognized bits left sb
    // empty, and delete(-3, 0) threw StringIndexOutOfBoundsException.
    if (sb.length() == 0) {
        return "<unknown>";
    }
    // Strip the trailing " | " separator.
    sb.delete(sb.length() - 3, sb.length());
    return sb.toString();
}
/**
 * Sets the current state of our various "MMS required" bits.
 *
 * @param state The bit to change, such as {@link HAS_ATTACHMENT}
 * @param on If true, set it; if false, clear it
 * @param notify Whether or not to notify the user
 */
private void updateState(int state, boolean on, boolean notify) {
    int oldState = mMmsState;
    if (on) {
        mMmsState |= state;
    } else {
        mMmsState &= ~state;
    }
    // If we are clearing the last bit that is not FORCE_MMS,
    // expire the FORCE_MMS bit.
    if (mMmsState == FORCE_MMS && ((oldState & ~FORCE_MMS) > 0)) {
        mMmsState = 0;
    }
    // Notify the listener if we are moving from SMS to MMS
    // or vice versa.
    if (notify) {
        if (oldState == 0 && mMmsState != 0) {
            mStatusListener.onProtocolChanged(true);
        } else if (oldState != 0 && mMmsState == 0) {
            mStatusListener.onProtocolChanged(false);
        }
    }
    if (oldState != mMmsState) {
        if (DEBUG) debug("updateState: %s%s = %s", on ? "+" : "-",
                stateString(state), stateString(mMmsState));
    }
}
/**
 * Send this message over the network. Will call back with onMessageSent() once
 * it has been dispatched to the telephony stack. This WorkingMessage object is
 * no longer useful after this method has been called.
 */
public void send() {
    if (DEBUG) debug("send");
    // Get ready to write to disk.
    prepareForSave();
    // We need the recipient list for both SMS and MMS.
    final Conversation conv = mConversation;
    String msgTxt = mText.toString();
    if (requiresMms() || addressContainsEmailToMms(conv, msgTxt)) {
        // Make local copies of the bits we need for sending a message,
        // because we will be doing it off of the main thread, which will
        // immediately continue on to resetting some of this state.
        final Uri mmsUri = mMessageUri;
        final PduPersister persister = PduPersister.getPduPersister(mContext);
        final SlideshowModel slideshow = mSlideshow;
        final SendReq sendReq = makeSendReq(conv, mSubject);
        // Make sure the text in slide 0 is no longer holding onto a reference to the text
        // in the message text box.
        slideshow.prepareForSend();
        // Do the dirty work of sending the message off of the main UI thread.
        new Thread(new Runnable() {
            public void run() {
                sendMmsWorker(conv, mmsUri, persister, slideshow, sendReq);
            }
        }).start();
    } else {
        // Same rules apply as above: snapshot the body before handing it
        // to the background thread.
        final String msgText = mText.toString();
        new Thread(new Runnable() {
            public void run() {
                sendSmsWorker(conv, msgText);
            }
        }).start();
    }
    // Mark the message as discarded because it is "off the market" after being sent.
    mDiscarded = true;
}
/**
 * Decides whether an email-gateway recipient forces this message to MMS:
 * true when some recipient is an email address and the gateway-prefixed
 * body would span more than one SMS segment. Sets RECIPIENTS_REQUIRE_MMS
 * as a side effect when it returns true.
 */
private boolean addressContainsEmailToMms(Conversation conv, String text) {
    if (MmsConfig.getEmailGateway() == null) {
        return false;
    }
    for (String dest : conv.getRecipients().getNumbers()) {
        if (Mms.isEmailAddress(dest)) {
            // The gateway prepends the destination address to the body.
            int[] params = SmsMessage.calculateLength(dest + " " + text, false);
            if (params[0] > 1) {
                updateState(RECIPIENTS_REQUIRE_MMS, true, true);
                return true;
            }
        }
    }
    return false;
}
// Message sending stuff

/**
 * Background worker that dispatches an SMS. Runs off the UI thread;
 * always invokes onMessageSent(), even on failure.
 */
private void sendSmsWorker(Conversation conv, String msgText) {
    // Make sure we are still using the correct thread ID for our
    // recipient set.
    long threadId = conv.ensureThreadId();
    String[] dests = conv.getRecipients().getNumbers();
    MessageSender sender = new SmsMessageSender(mContext, dests, msgText, threadId);
    try {
        sender.sendMessage(threadId);
        // Make sure this thread isn't over the limits in message count
        Recycler.getSmsRecycler().deleteOldMessagesByThreadId(mContext, threadId);
    } catch (Exception e) {
        Log.e(TAG, "Failed to send SMS message, threadId=" + threadId, e);
    }
    mStatusListener.onMessageSent();
}
/**
 * Background worker that persists and dispatches an MMS. Runs off the UI
 * thread; always invokes onMessageSent(), even on failure.
 */
private void sendMmsWorker(Conversation conv, Uri mmsUri, PduPersister persister,
        SlideshowModel slideshow, SendReq sendReq) {
    // Make sure we are still using the correct thread ID for our
    // recipient set.
    long threadId = conv.ensureThreadId();
    if (DEBUG) debug("sendMmsWorker: update draft MMS message " + mmsUri);
    if (mmsUri == null) {
        // Create a new MMS message if one hasn't been made yet.
        mmsUri = createDraftMmsMessage(persister, sendReq, slideshow);
    } else {
        // Otherwise, sync the MMS message in progress to disk.
        updateDraftMmsMessage(mmsUri, persister, slideshow, sendReq);
    }
    // Be paranoid and clean any draft SMS up.
    deleteDraftSmsMessage(threadId);
    MessageSender sender = new MmsMessageSender(mContext, mmsUri);
    try {
        if (!sender.sendMessage(threadId)) {
            // The message was sent through SMS protocol, we should
            // delete the copy which was previously saved in MMS drafts.
            SqliteWrapper.delete(mContext, mContentResolver, mmsUri, null, null);
        }
        // Make sure this thread isn't over the limits in message count
        Recycler.getMmsRecycler().deleteOldMessagesByThreadId(mContext, threadId);
    } catch (Exception e) {
        Log.e(TAG, "Failed to send message: " + mmsUri + ", threadId=" + threadId, e);
    }
    mStatusListener.onMessageSent();
}
// Draft message stuff

// Projection and matching column indexes used when querying for an
// MMS draft (see readDraftMmsMessage).
private static final String[] MMS_DRAFT_PROJECTION = {
    Mms._ID,        // 0
    Mms.SUBJECT     // 1
};
private static final int MMS_ID_INDEX = 0;
private static final int MMS_SUBJECT_INDEX = 1;
/**
 * Looks up an MMS draft for the given thread.
 *
 * @param sb receives the draft's subject, if any
 * @return the draft message Uri, or null if there is no draft (or the
 *         query failed)
 */
private static Uri readDraftMmsMessage(Context context, long threadId, StringBuilder sb) {
    if (DEBUG) debug("readDraftMmsMessage tid=%d", threadId);
    ContentResolver cr = context.getContentResolver();
    final String selection = Mms.THREAD_ID + " = " + threadId;
    Cursor cursor = SqliteWrapper.query(context, cr,
            Mms.Draft.CONTENT_URI, MMS_DRAFT_PROJECTION,
            selection, null, null);
    // Fixed: SqliteWrapper.query can return null on provider failure; the
    // original code then NPE'd on moveToFirst()/close().
    if (cursor == null) {
        return null;
    }
    try {
        if (cursor.moveToFirst()) {
            Uri uri = ContentUris.withAppendedId(Mms.Draft.CONTENT_URI,
                    cursor.getLong(MMS_ID_INDEX));
            String subject = cursor.getString(MMS_SUBJECT_INDEX);
            if (subject != null) {
                sb.append(subject);
            }
            return uri;
        }
    } finally {
        cursor.close();
    }
    return null;
}
/**
 * Builds a SendReq (MMS send request PDU) addressed to the conversation's
 * recipients, with the given subject and the current time as its date.
 */
private static SendReq makeSendReq(Conversation conv, CharSequence subject) {
    final SendReq req = new SendReq();
    final EncodedStringValue[] to =
            EncodedStringValue.encodeStrings(conv.getRecipients().getNumbers());
    if (to != null) {
        req.setTo(to);
    }
    if (!TextUtils.isEmpty(subject)) {
        req.setSubject(new EncodedStringValue(subject.toString()));
    }
    // MMS PDU dates are expressed in seconds, not milliseconds.
    req.setDate(System.currentTimeMillis() / 1000L);
    return req;
}
/**
 * Persists the slideshow as a new draft MMS.
 *
 * @return the new draft's Uri, or null on failure
 */
private static Uri createDraftMmsMessage(PduPersister persister, SendReq sendReq,
        SlideshowModel slideshow) {
    try {
        PduBody pb = slideshow.toPduBody();
        sendReq.setBody(pb);
        Uri res = persister.persist(sendReq, Mms.Draft.CONTENT_URI);
        // Resync the in-memory model with what was just written out.
        slideshow.sync(pb);
        return res;
    } catch (MmsException e) {
        return null;
    }
}
/**
 * Writes the current MMS draft to the provider on a background thread,
 * creating it if necessary, and deletes any stale SMS draft.
 */
private void asyncUpdateDraftMmsMessage(final Conversation conv) {
    if (DEBUG) debug("asyncUpdateDraftMmsMessage conv=%s mMessageUri=%s", conv, mMessageUri);
    // Snapshot what the background thread needs before returning.
    final PduPersister persister = PduPersister.getPduPersister(mContext);
    final SendReq sendReq = makeSendReq(conv, mSubject);
    new Thread(new Runnable() {
        public void run() {
            conv.ensureThreadId();
            conv.setDraftState(true);
            if (mMessageUri == null) {
                mMessageUri = createDraftMmsMessage(persister, sendReq, mSlideshow);
            } else {
                updateDraftMmsMessage(mMessageUri, persister, mSlideshow, sendReq);
            }
        }
    }).start();
    // Be paranoid and delete any SMS drafts that might be lying around.
    asyncDeleteDraftSmsMessage(conv);
}
/**
 * Syncs an existing draft MMS (headers and parts) to the provider.
 */
private static void updateDraftMmsMessage(Uri uri, PduPersister persister,
        SlideshowModel slideshow, SendReq sendReq) {
    if (DEBUG) debug("updateDraftMmsMessage uri=%s", uri);
    persister.updateHeaders(uri, sendReq);
    final PduBody pb = slideshow.toPduBody();
    try {
        persister.updateParts(uri, pb);
    } catch (MmsException e) {
        Log.e(TAG, "updateDraftMmsMessage: cannot update message " + uri);
    }
    // Resync the in-memory model with what was written out.
    slideshow.sync(pb);
}
// Selection, projection and column index used when querying for an
// SMS draft within a conversation thread.
private static final String SMS_DRAFT_WHERE = Sms.TYPE + "=" + Sms.MESSAGE_TYPE_DRAFT;
private static final String[] SMS_BODY_PROJECTION = { Sms.BODY };
private static final int SMS_BODY_INDEX = 0;
/**
 * Reads a draft message for the given thread ID from the database,
 * if there is one, deletes it from the database, and returns it.
 *
 * @return The draft message or an empty string.
 */
private static String readDraftSmsMessage(Context context, long thread_id, Conversation conv) {
    if (DEBUG) debug("readDraftSmsMessage tid=%d", thread_id);
    // If it's an invalid thread, don't bother.
    if (thread_id <= 0) {
        return "";
    }
    ContentResolver cr = context.getContentResolver();
    Uri thread_uri = ContentUris.withAppendedId(Sms.Conversations.CONTENT_URI, thread_id);
    String body = "";
    Cursor c = SqliteWrapper.query(context, cr,
            thread_uri, SMS_BODY_PROJECTION, SMS_DRAFT_WHERE, null, null);
    // Fixed: SqliteWrapper.query can return null on provider failure; the
    // original code then NPE'd on moveToFirst()/close().
    if (c != null) {
        try {
            if (c.moveToFirst()) {
                body = c.getString(SMS_BODY_INDEX);
            }
        } finally {
            c.close();
        }
    }
    // Clean out drafts for this thread -- if the recipient set changes,
    // we will lose track of the original draft and be unable to delete
    // it later. The message will be re-saved if necessary upon exit of
    // the activity.
    SqliteWrapper.delete(context, cr, thread_uri, SMS_DRAFT_WHERE, null);
    // We found a draft, and if there are no messages in the conversation,
    // that means we deleted the thread, too. Must reset the thread id
    // so we'll eventually create a new thread.
    if (conv.getMessageCount() == 0) {
        if (DEBUG) debug("readDraftSmsMessage calling clearThreadId");
        conv.clearThreadId();
    }
    return body;
}
/**
 * Persists the SMS draft body for the conversation on a background thread.
 */
private void asyncUpdateDraftSmsMessage(final Conversation conv, final String contents) {
    new Thread(new Runnable() {
        public void run() {
            long threadId = conv.ensureThreadId();
            conv.setDraftState(true);
            updateDraftSmsMessage(threadId, contents);
        }
    }).start();
}
/**
 * Writes (or clears) the SMS draft row for the given thread. Also removes
 * any MMS draft for the same thread, since only one draft kind may exist.
 */
private void updateDraftSmsMessage(long thread_id, String contents) {
    if (DEBUG) debug("updateDraftSmsMessage tid=%d, contents=\"%s\"", thread_id, contents);
    // If we don't have a valid thread, there's nothing to do.
    if (thread_id <= 0) {
        return;
    }
    // Don't bother saving an empty message.
    if (TextUtils.isEmpty(contents)) {
        // But delete the old draft message if it's there.
        deleteDraftSmsMessage(thread_id);
        return;
    }
    ContentValues values = new ContentValues(3);
    values.put(Sms.THREAD_ID, thread_id);
    values.put(Sms.BODY, contents);
    values.put(Sms.TYPE, Sms.MESSAGE_TYPE_DRAFT);
    SqliteWrapper.insert(mContext, mContentResolver, Sms.CONTENT_URI, values);
    // An SMS draft supersedes any MMS draft for this thread.
    asyncDeleteDraftMmsMessage(thread_id);
}
/**
 * Performs a content-provider delete on a background thread.
 */
private void asyncDelete(final Uri uri, final String selection, final String[] selectionArgs) {
    if (DEBUG) debug("asyncDelete %s where %s", uri, selection);
    new Thread(new Runnable() {
        public void run() {
            SqliteWrapper.delete(mContext, mContentResolver, uri, selection, selectionArgs);
        }
    }).start();
}
/**
 * Asynchronously deletes any SMS draft stored for the conversation.
 */
private void asyncDeleteDraftSmsMessage(Conversation conv) {
    final long threadId = conv.getThreadId();
    if (threadId <= 0) {
        // No thread yet, so there can be no persisted draft to remove.
        return;
    }
    asyncDelete(ContentUris.withAppendedId(Sms.Conversations.CONTENT_URI, threadId),
            SMS_DRAFT_WHERE, null);
}
/**
 * Synchronously deletes any SMS draft stored for the given thread.
 */
private void deleteDraftSmsMessage(long threadId) {
    SqliteWrapper.delete(mContext, mContentResolver,
            ContentUris.withAppendedId(Sms.Conversations.CONTENT_URI, threadId),
            SMS_DRAFT_WHERE, null);
}
/**
 * Asynchronously deletes any MMS draft stored for the given thread.
 */
private void asyncDeleteDraftMmsMessage(long threadId) {
    final String where = Mms.THREAD_ID + " = " + threadId;
    asyncDelete(Mms.Draft.CONTENT_URI, where, null);
}
// Logging stuff.

/**
 * Formats a string array as "[a, b, c]" for log output ("[]" when empty).
 */
private static String prettyArray(String[] array) {
    // java.util.Arrays.toString produces exactly the hand-rolled format
    // this method used to build: "[]" for empty, "[a, b, c]" otherwise.
    return java.util.Arrays.toString(array);
}
/**
 * Formats a log line, pretty-printing any String[] arguments and
 * prefixing the current thread id.
 */
private static String logFormat(String format, Object... args) {
    // Render String[] arguments through prettyArray() before formatting.
    for (int i = 0; i < args.length; i++) {
        final Object arg = args[i];
        if (arg instanceof String[]) {
            args[i] = prettyArray((String[]) arg);
        }
    }
    return "[" + Thread.currentThread().getId() + "] " + String.format(format, args);
}
/** Logs a formatted debug message (see logFormat). */
private static void debug(String format, Object... args) {
    Log.d(TAG, logFormat(format, args));
}
/** Logs a formatted warning message (see logFormat). */
private static void warn(String format, Object... args) {
    Log.w(TAG, logFormat(format, args));
}
/** Logs a formatted error message (see logFormat). */
private static void error(String format, Object... args) {
    Log.e(TAG, logFormat(format, args));
}
}
| Fix crash when sending SMS to email address
Must call ensureSlideshow() and syncTextToSlideshow(), otherwise in send()
function, variable "slideshow" will be NULL and results in force close.
| src/com/android/mms/data/WorkingMessage.java | Fix crash when sending SMS to email address |
|
Java | apache-2.0 | 9b5fb39a4bcd6bfd26754a9d827e7670805f8369 | 0 | mrniko/redisson,redisson/redisson,jackygurui/redisson | /**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.connection;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.redisson.connection.ClientConnectionsEntry.FreezeReason;
import org.redisson.misc.URIBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.channel.epoll.EpollDatagramChannel;
import io.netty.channel.socket.DatagramChannel;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.resolver.AddressResolver;
import io.netty.resolver.dns.DefaultDnsServerAddressStreamProvider;
import io.netty.resolver.dns.DnsAddressResolverGroup;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.FutureListener;
import io.netty.util.concurrent.ScheduledFuture;
/**
 * DNS changes monitor.
 *
 * Periodically re-resolves the configured master and slave host names and,
 * when an address changes, switches the affected Redis connection entries
 * over to the new address.
 *
 * @author Nikita Koksharov
 *
 */
public class DNSMonitor {

    private static final Logger log = LoggerFactory.getLogger(DNSMonitor.class);

    private final DnsAddressResolverGroup resolverGroup;

    private ScheduledFuture<?> dnsMonitorFuture;

    private ConnectionManager connectionManager;

    // Last successfully resolved address for each master/slave host URI.
    private final Map<URI, InetAddress> masters = new HashMap<URI, InetAddress>();
    private final Map<URI, InetAddress> slaves = new HashMap<URI, InetAddress>();

    // Delay in milliseconds between monitoring rounds.
    private long dnsMonitoringInterval;

    public DNSMonitor(ConnectionManager connectionManager, Set<URI> masterHosts, Set<URI> slaveHosts, long dnsMonitoringInterval) {
        // Pick the datagram channel implementation matching the transport
        // configured for the rest of the client.
        Class<? extends DatagramChannel> channelClass;
        if (connectionManager.getCfg().isUseLinuxNativeEpoll()) {
            channelClass = EpollDatagramChannel.class;
        } else {
            channelClass = NioDatagramChannel.class;
        }
        resolverGroup = new DnsAddressResolverGroup(channelClass, DefaultDnsServerAddressStreamProvider.INSTANCE);

        AddressResolver<InetSocketAddress> resolver = resolverGroup.getResolver(connectionManager.getGroup().next());
        // Resolve all hosts synchronously once to establish the baseline
        // addresses; syncUninterruptibly() blocks and rethrows failures.
        // Port 0 is used because only the resolved address matters here.
        for (URI host : masterHosts) {
            Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(host.getHost(), 0));
            resolveFuture.syncUninterruptibly();
            masters.put(host, resolveFuture.getNow().getAddress());
        }
        for (URI host : slaveHosts) {
            Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(host.getHost(), 0));
            resolveFuture.syncUninterruptibly();
            slaves.put(host, resolveFuture.getNow().getAddress());
        }

        this.connectionManager = connectionManager;
        this.dnsMonitoringInterval = dnsMonitoringInterval;
    }

    public void start() {
        monitorDnsChange();
        log.debug("DNS monitoring enabled; Current masters: {}, slaves: {}", masters, slaves);
    }

    public void stop() {
        if (dnsMonitorFuture != null) {
            dnsMonitorFuture.cancel(true);
        }
    }

    private void monitorDnsChange() {
        // Schedules a single round; the round reschedules itself once the
        // last outstanding resolution completes (tracked by 'counter').
        // NOTE(review): if both maps were empty the counter would start at
        // zero and monitoring would silently stop — confirm callers always
        // supply at least one host.
        dnsMonitorFuture = connectionManager.getGroup().schedule(new Runnable() {
            @Override
            public void run() {
                final AddressResolver<InetSocketAddress> resolver = resolverGroup.getResolver(connectionManager.getGroup().next());
                final AtomicInteger counter = new AtomicInteger(masters.size() + slaves.size());
                for (final Entry<URI, InetAddress> entry : masters.entrySet()) {
                    Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(entry.getKey().getHost(), 0));
                    resolveFuture.addListener(new FutureListener<InetSocketAddress>() {
                        @Override
                        public void operationComplete(Future<InetSocketAddress> future) throws Exception {
                            // Reschedule the next round after the final
                            // resolution of this round, success or not.
                            if (counter.decrementAndGet() == 0) {
                                monitorDnsChange();
                            }
                            if (!future.isSuccess()) {
                                log.error("Unable to resolve " + entry.getKey().getHost(), future.cause());
                                return;
                            }

                            InetAddress master = entry.getValue();
                            InetAddress now = future.get().getAddress();
                            if (!now.getHostAddress().equals(master.getHostAddress())) {
                                log.info("Detected DNS change. {} has changed from {} to {}", entry.getKey().getHost(), master.getHostAddress(), now.getHostAddress());
                                // Find the entry bound to the old address and
                                // re-point it at the (re-resolvable) host URI.
                                // NOTE(review): getHostName() may trigger a
                                // reverse DNS lookup.
                                for (MasterSlaveEntry entrySet : connectionManager.getEntrySet()) {
                                    if (entrySet.getClient().getAddr().getHostName().equals(entry.getKey().getHost())
                                            && entrySet.getClient().getAddr().getPort() == entry.getKey().getPort()) {
                                        entrySet.changeMaster(entry.getKey());
                                    }
                                }
                                masters.put(entry.getKey(), now);
                                log.info("Master {} has been changed", entry.getKey().getHost());
                            }
                        }
                    });
                }

                for (final Entry<URI, InetAddress> entry : slaves.entrySet()) {
                    Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(entry.getKey().getHost(), 0));
                    resolveFuture.addListener(new FutureListener<InetSocketAddress>() {
                        @Override
                        public void operationComplete(Future<InetSocketAddress> future) throws Exception {
                            if (counter.decrementAndGet() == 0) {
                                monitorDnsChange();
                            }
                            if (!future.isSuccess()) {
                                log.error("Unable to resolve " + entry.getKey().getHost(), future.cause());
                                return;
                            }

                            InetAddress slave = entry.getValue();
                            InetAddress updatedSlave = future.get().getAddress();
                            if (!updatedSlave.getHostAddress().equals(slave.getHostAddress())) {
                                log.info("Detected DNS change. {} has changed from {} to {}", entry.getKey().getHost(), slave.getHostAddress(), updatedSlave.getHostAddress());
                                // Bounce the slave: mark the old address down,
                                // then bring the host URI back up so it is
                                // re-resolved to the new address.
                                for (MasterSlaveEntry masterSlaveEntry : connectionManager.getEntrySet()) {
                                    URI uri = URIBuilder.create(slave.getHostAddress() + ":" + entry.getKey().getPort());
                                    if (masterSlaveEntry.slaveDown(uri, FreezeReason.MANAGER)) {
                                        masterSlaveEntry.slaveUp(entry.getKey(), FreezeReason.MANAGER);
                                    }
                                }
                                slaves.put(entry.getKey(), updatedSlave);
                                log.info("Slave {} has been changed", entry.getKey().getHost());
                            }
                        }
                    });
                }
            }
        }, dnsMonitoringInterval, TimeUnit.MILLISECONDS);
    }

}
| redisson/src/main/java/org/redisson/connection/DNSMonitor.java | /**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.connection;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.redisson.connection.ClientConnectionsEntry.FreezeReason;
import org.redisson.misc.URIBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.resolver.AddressResolver;
import io.netty.resolver.dns.DefaultDnsServerAddressStreamProvider;
import io.netty.resolver.dns.DnsAddressResolverGroup;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.FutureListener;
import io.netty.util.concurrent.ScheduledFuture;
/**
* DNS changes monitor.
*
* @author Nikita Koksharov
*
*/
public class DNSMonitor {
private static final Logger log = LoggerFactory.getLogger(DNSMonitor.class);
private DnsAddressResolverGroup resolverGroup = new DnsAddressResolverGroup(NioDatagramChannel.class, DefaultDnsServerAddressStreamProvider.INSTANCE);
private ScheduledFuture<?> dnsMonitorFuture;
private ConnectionManager connectionManager;
private final Map<URI, InetAddress> masters = new HashMap<URI, InetAddress>();
private final Map<URI, InetAddress> slaves = new HashMap<URI, InetAddress>();
private long dnsMonitoringInterval;
public DNSMonitor(ConnectionManager connectionManager, Set<URI> masterHosts, Set<URI> slaveHosts, long dnsMonitoringInterval) {
AddressResolver<InetSocketAddress> resolver = resolverGroup.getResolver(connectionManager.getGroup().next());
for (URI host : masterHosts) {
Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(host.getHost(), 0));
resolveFuture.syncUninterruptibly();
masters.put(host, resolveFuture.getNow().getAddress());
}
for (URI host : slaveHosts) {
Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(host.getHost(), 0));
resolveFuture.syncUninterruptibly();
slaves.put(host, resolveFuture.getNow().getAddress());
}
this.connectionManager = connectionManager;
this.dnsMonitoringInterval = dnsMonitoringInterval;
}
public void start() {
monitorDnsChange();
log.debug("DNS monitoring enabled; Current masters: {}, slaves: {}", masters, slaves);
}
public void stop() {
if (dnsMonitorFuture != null) {
dnsMonitorFuture.cancel(true);
}
}
private void monitorDnsChange() {
dnsMonitorFuture = connectionManager.getGroup().schedule(new Runnable() {
@Override
public void run() {
final AddressResolver<InetSocketAddress> resolver = resolverGroup.getResolver(connectionManager.getGroup().next());
final AtomicInteger counter = new AtomicInteger(masters.size() + slaves.size());
for (final Entry<URI, InetAddress> entry : masters.entrySet()) {
Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(entry.getKey().getHost(), 0));
resolveFuture.addListener(new FutureListener<InetSocketAddress>() {
@Override
public void operationComplete(Future<InetSocketAddress> future) throws Exception {
if (counter.decrementAndGet() == 0) {
monitorDnsChange();
}
if (!future.isSuccess()) {
log.error("Unable to resolve " + entry.getKey().getHost(), future.cause());
return;
}
InetAddress master = entry.getValue();
InetAddress now = future.get().getAddress();
if (!now.getHostAddress().equals(master.getHostAddress())) {
log.info("Detected DNS change. {} has changed from {} to {}", entry.getKey().getHost(), master.getHostAddress(), now.getHostAddress());
for (MasterSlaveEntry entrySet : connectionManager.getEntrySet()) {
if (entrySet.getClient().getAddr().getHostName().equals(entry.getKey().getHost())
&& entrySet.getClient().getAddr().getPort() == entry.getKey().getPort()) {
entrySet.changeMaster(entry.getKey());
}
}
masters.put(entry.getKey(), now);
log.info("Master {} has been changed", entry.getKey().getHost());
}
}
});
}
for (final Entry<URI, InetAddress> entry : slaves.entrySet()) {
Future<InetSocketAddress> resolveFuture = resolver.resolve(InetSocketAddress.createUnresolved(entry.getKey().getHost(), 0));
resolveFuture.addListener(new FutureListener<InetSocketAddress>() {
@Override
public void operationComplete(Future<InetSocketAddress> future) throws Exception {
if (counter.decrementAndGet() == 0) {
monitorDnsChange();
}
if (!future.isSuccess()) {
log.error("Unable to resolve " + entry.getKey().getHost(), future.cause());
return;
}
InetAddress slave = entry.getValue();
InetAddress updatedSlave = future.get().getAddress();
if (!updatedSlave.getHostAddress().equals(slave.getHostAddress())) {
log.info("Detected DNS change. {} has changed from {} to {}", entry.getKey().getHost(), slave.getHostAddress(), updatedSlave.getHostAddress());
for (MasterSlaveEntry masterSlaveEntry : connectionManager.getEntrySet()) {
URI uri = URIBuilder.create(slave.getHostAddress() + ":" + entry.getKey().getPort());
if (masterSlaveEntry.slaveDown(uri, FreezeReason.MANAGER)) {
masterSlaveEntry.slaveUp(entry.getKey(), FreezeReason.MANAGER);
}
}
slaves.put(entry.getKey(), updatedSlave);
log.info("Slave {} has been changed", entry.getKey().getHost());
}
}
});
}
}
}, dnsMonitoringInterval, TimeUnit.MILLISECONDS);
}
}
| Fixed - DNSMonitor fails on bootstrap with custom event loop. #1188
| redisson/src/main/java/org/redisson/connection/DNSMonitor.java | Fixed - DNSMonitor fails on bootstrap with custom event loop. #1188 |
|
Java | apache-2.0 | 60ed202dfc5ed6323b6c0bbd5aaa649acbc3db4b | 0 | MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab | package org.myrobotlab.service;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.alicebot.ab.AIMLMap;
import org.alicebot.ab.AIMLSet;
import org.alicebot.ab.Bot;
import org.alicebot.ab.Category;
import org.alicebot.ab.Chat;
import org.alicebot.ab.Predicates;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.ServiceType;
import org.myrobotlab.framework.interfaces.ServiceInterface;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.programab.ChatData;
import org.myrobotlab.programab.OOBPayload;
import org.myrobotlab.service.interfaces.TextListener;
import org.myrobotlab.service.interfaces.TextPublisher;
import org.slf4j.Logger;
/**
* Program AB service for MyRobotLab Uses AIML 2.0 to create a ChatBot This is a
* reboot of the Old AIML spec to be more 21st century.
*
* More Info at http://aitools.org/ProgramAB
*
* @author kwatters
*
*/
public class ProgramAB extends Service implements TextListener, TextPublisher {
transient public final static Logger log = LoggerFactory.getLogger(ProgramAB.class);
public static class Response {
public String session;
public String msg;
public List<OOBPayload> payloads;
// FIXME - timestamps are usually longs System.currentTimeMillis()
public Date timestamp;
public Response(String session, String msg, List<OOBPayload> payloads, Date timestamp) {
this.session = session;
this.msg = msg;
this.payloads = payloads;
this.timestamp = timestamp;
}
public String toString() {
return String.format("%d %s %s", timestamp.getTime(), session, msg);
}
}
transient Bot bot = null;
HashSet<String> bots = new HashSet<String>();
String path = "ProgramAB";
/**
* botName - is un-initialized to preserve serialization stickyness
*/
// String botName;
// This is the username that is chatting with the bot.
// String currentSession = "default";
// Session is a user and a bot. so the key to the session should be the
// username, and the bot name.
HashMap<String, ChatData> sessions = new HashMap<String, ChatData>();
// TODO: better parsing than a regex...
transient Pattern oobPattern = Pattern.compile("<oob>.*?</oob>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
transient Pattern mrlPattern = Pattern.compile("<mrl>.*?</mrl>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
// a guaranteed bot we have
String currentBotName = "alice2";
// this is the username that is chatting with the bot.
String currentUserName = "default";
static final long serialVersionUID = 1L;
static int savePredicatesInterval = 60 * 1000 * 5; // every 5 minutes
public String wasCleanyShutdowned;
public ProgramAB(String name) {
super(name);
// Tell programAB to persist it's learned predicates about people
// every 30 seconds.
addTask("savePredicates", savePredicatesInterval, 0, "savePredicates");
// TODO: Lazy load this!
// look for local bots defined
File programAbDir = new File(String.format("%s/bots", path));
if (!programAbDir.exists() || !programAbDir.isDirectory()) {
log.info("%s does not exist !!!");
} else {
File[] listOfFiles = programAbDir.listFiles();
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile()) {
// System.out.println("File " + listOfFiles[i].getName());
} else if (listOfFiles[i].isDirectory()) {
bots.add(listOfFiles[i].getName());
}
}
}
}
public void addOOBTextListener(TextListener service) {
addListener("publishOOBText", service.getName(), "onOOBText");
}
public void addResponseListener(Service service) {
addListener("publishResponse", service.getName(), "onResponse");
}
public void addTextListener(TextListener service) {
addListener("publishText", service.getName(), "onText");
}
public void addTextPublisher(TextPublisher service) {
addListener("publishText", service.getName(), "onText");
}
private void cleanOutOfDateAimlIFFiles(String botName) {
String aimlPath = path + File.separator + "bots" + File.separator + botName + File.separator + "aiml";
String aimlIFPath = path + File.separator + "bots" + File.separator + botName + File.separator + "aimlif";
log.info("AIML FILES:");
File folder = new File(aimlPath);
File folderaimlIF = new File(aimlIFPath);
if (!folder.exists()) {
log.info("{} does not exist", aimlPath);
return;
}
if (wasCleanyShutdowned == null || wasCleanyShutdowned.isEmpty()) {
wasCleanyShutdowned="firstStart";
}
if (wasCleanyShutdowned.equals("nok")) {
if (folderaimlIF.exists()) {
// warn("Bad previous shutdown, ProgramAB need to recompile AimlIf files. Don't worry.");
log.info("Bad previous shutdown, ProgramAB need to recompile AimlIf files. Don't worry.");
for (File f : folderaimlIF.listFiles()) {
f.delete();
}
}
}
log.info(folder.getAbsolutePath());
HashMap<String, Long> modifiedDates = new HashMap<String, Long>();
for (File f : folder.listFiles()) {
log.info(f.getAbsolutePath());
// TODO: better stripping of the file extension
String aiml = f.getName().replace(".aiml", "");
modifiedDates.put(aiml, f.lastModified());
}
log.info("AIMLIF FILES:");
folderaimlIF = new File(aimlIFPath);
if (!folderaimlIF.exists()) {
// TODO: throw an exception warn / log ?
log.info("aimlif directory missing,creating it. " + folderaimlIF.getAbsolutePath());
folderaimlIF.mkdirs();
return;
}
for (File f : folderaimlIF.listFiles()) {
log.info(f.getAbsolutePath());
// TODO: better stripping of the file extension
String aimlIF = f.getName().replace(".aiml.csv", "");
Long lastMod = modifiedDates.get(aimlIF);
if (lastMod != null) {
if (f.lastModified() < lastMod) {
// the AIMLIF file is newer than the AIML file.
// delete the AIMLIF file so ProgramAB recompiles it
// properly.
log.info("Deleteing AIMLIF file because the original AIML file was modified. {}", aimlIF);
f.delete();
// edit moz4r : we need to change the last modification date to aiml folder for recompilation
sleep(1000);
String fil=aimlPath+File.separator+"folder_updated";
File file = new File(fil);
file.delete();
try{
PrintWriter writer = new PrintWriter(fil, "UTF-8");
writer.println(lastMod.toString());
writer.close();
} catch (IOException e) {
// do something
}
}
}
}
}
private String createSessionPredicateFilename(String username, String botName) {
// TODO: sanitize the session label so it can be safely used as a filename
String predicatePath = path + File.separator + "bots" + File.separator + botName + File.separator + "config";
// just in case the directory doesn't exist.. make it.
File predDir = new File(predicatePath);
if (!predDir.exists()) {
predDir.mkdirs();
}
predicatePath += File.separator + username + ".predicates.txt";
return predicatePath;
}
public int getMaxConversationDelay() {
return sessions.get(resolveSessionKey(currentUserName, currentBotName)).maxConversationDelay;
}
public Response getResponse(String text) {
return getResponse(currentUserName, text);
}
/**
*
* @param text
* - the query string to the bot brain
* @param username
* - the user that is sending the query
* @param botName
* - the name of the bot you which to get the response from
* @return the response for a user from a bot given the input text.
*/
public Response getResponse(String username, String botName, String text) {
this.currentBotName = botName;
return getResponse(username, text);
}
public Response getResponse(String username, String text) {
log.info("Get Response for : user {} bot {} : {}", username, currentBotName, text);
if (bot == null) {
String error = "ERROR: Core not loaded, please load core before chatting.";
error(error);
return new Response(username, error, null, new Date());
}
String sessionKey = resolveSessionKey(username, currentBotName);
if (!sessions.containsKey(sessionKey)) {
startSession(path, username, currentBotName);
}
ChatData chatData = sessions.get(sessionKey);
String res = getChat(username, currentBotName).multisentenceRespond(text);
// grab and update the time when this response came in.
chatData.lastResponseTime = new Date();
// Check the AIML response to see if there is OOB (out of band data)
// If so, publish that data independent of the text response.
List<OOBPayload> payloads = null;
if (chatData.processOOB) {
payloads = processOOB(res);
}
// OOB text should not be published as part of the response text.
Matcher matcher = oobPattern.matcher(res);
res = matcher.replaceAll("").trim();
Response response = new Response(username, res, payloads, chatData.lastResponseTime);
// Now that we've said something, lets create a timer task to wait for N
// seconds
// and if nothing has been said.. try say something else.
// TODO: trigger a task to respond with something again
// if the humans get bored
if (chatData.enableAutoConversation) {
// schedule one future reply. (always get the last word in..)
// int numExecutions = 1;
// TODO: we need a way for the task to just execute one time
// it'd be good to have access to the timer here, but it's transient
addTask("getResponse", chatData.maxConversationDelay, 0, "getResponse", username, text);
}
// EEK! clean up the API!
invoke("publishResponse", response);
invoke("publishResponseText", response);
invoke("publishText", response.msg);
info("to: %s - %s", username, res);
// if (log.isDebugEnabled()) {
// for (String key : sessions.get(session).predicates.keySet()) {
// log.debug(session + " " + key + " " +
// sessions.get(session).predicates.get(key));
// }
// }
// TODO: wire this in so the gui updates properly. ??
// broadcastState();
return response;
}
public String resolveSessionKey(String username, String botname) {
return username + "-" + botname;
}
public void repetition_count(int val)
{
org.alicebot.ab.MagicNumbers.repetition_count=val;
}
public Chat getChat(String userName, String botName) {
String sessionKey = resolveSessionKey(userName, botName);
if (!sessions.containsKey(sessionKey)) {
error("%s session does not exist", sessionKey);
return null;
} else {
return sessions.get(sessionKey).chat;
}
}
public void removePredicate(String userName, String predicateName) {
removePredicate(userName, currentBotName, predicateName);
}
public void removePredicate(String userName, String botName, String predicateName) {
Predicates preds = getChat(userName, botName).predicates;
preds.remove(predicateName);
}
public void addToSet(String setName, String setValue) {
// add to the set for the bot.
AIMLSet updateSet = bot.setMap.get(setName);
setValue = setValue.toUpperCase().trim();
if (updateSet != null) {
updateSet.add(setValue);
// persist to disk.
updateSet.writeAIMLSet();
} else {
log.info("Unknown AIML set: {}. A new set will be created. ", setName);
// TODO: should we create a new set ? or just log this warning?
// The AIML Set doesn't exist. Lets create a new one
AIMLSet newSet = new AIMLSet(setName, bot);
newSet.add(setValue);
newSet.writeAIMLSet();
}
}
public void addToMap(String mapName, String mapKey, String mapValue) {
// add an entry to the map.
AIMLMap updateMap = bot.mapMap.get(mapName);
mapKey = mapKey.toUpperCase().trim();
if (updateMap != null) {
updateMap.put(mapKey, mapValue);
// persist to disk!
updateMap.writeAIMLMap();
} else {
log.info("Unknown AIML map: {}. A new MAP will be created. ", mapName);
// dynamically create new maps?!
AIMLMap newMap = new AIMLMap(mapName, bot);
newMap.put(mapKey, mapValue);
newMap.writeAIMLMap();
}
}
public void setPredicate(String username, String predicateName, String predicateValue) {
Predicates preds = getChat(username, currentBotName).predicates;
preds.put(predicateName, predicateValue);
}
public void unsetPredicate(String username, String predicateName) {
Predicates preds = getChat(username, currentBotName).predicates;
preds.remove(predicateName);
}
public String getPredicate(String username, String predicateName) {
Predicates preds = getChat(username, currentBotName).predicates;
return preds.get(predicateName);
}
/**
* Only respond if the last response was longer than delay ms ago
*
* @param session
* - current session/username
* @param text
* - text to get a response for
* @param delay
* - min amount of time that must have transpired since the last
* response.
* @return the response
*/
public Response getResponse(String session, String text, Long delay) {
ChatData chatData = sessions.get(session);
long delta = System.currentTimeMillis() - chatData.lastResponseTime.getTime();
if (delta > delay) {
return getResponse(session, text);
} else {
return null;
}
}
public boolean isEnableAutoConversation() {
return sessions.get(resolveSessionKey(currentUserName, currentBotName)).enableAutoConversation;
}
public boolean isProcessOOB() {
return sessions.get(resolveSessionKey(currentUserName, currentBotName)).processOOB;
}
/**
* Return a list of all patterns that the AIML Bot knows to match against.
*
* @param botName the bots name from which to return it's patterns.
* @return a list of all patterns loaded into the aiml brain
*/
public ArrayList<String> listPatterns(String botName) {
ArrayList<String> patterns = new ArrayList<String>();
for (Category c : bot.brain.getCategories()) {
patterns.add(c.getPattern());
}
return patterns;
}
/**
* Return the number of milliseconds since the last response was given -1 if a
* response has never been given.
* @return milliseconds
*/
public long millisecondsSinceLastResponse() {
ChatData chatData = sessions.get(resolveSessionKey(currentUserName, currentBotName));
if (chatData.lastResponseTime == null) {
return -1;
}
long delta = System.currentTimeMillis() - chatData.lastResponseTime.getTime();
return delta;
}
@Override
public void onText(String text) {
// What else should we do here? seems reasonable to just do this.
// this should actually call getResponse
// on input, get the proper response
// Response resp = getResponse(text);
getResponse(text);
// push that to the next end point.
// invoke("publishText", resp.msg);
}
private OOBPayload parseOOB(String oobPayload) {
// TODO: fix the damn double encoding issue.
// we have user entered text in the service/method
// and params values.
// grab the service
Pattern servicePattern = Pattern.compile("<service>(.*?)</service>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
Matcher serviceMatcher = servicePattern.matcher(oobPayload);
serviceMatcher.find();
String serviceName = serviceMatcher.group(1);
Pattern methodPattern = Pattern.compile("<method>(.*?)</method>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
Matcher methodMatcher = methodPattern.matcher(oobPayload);
methodMatcher.find();
String methodName = methodMatcher.group(1);
Pattern paramPattern = Pattern.compile("<param>(.*?)</param>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
Matcher paramMatcher = paramPattern.matcher(oobPayload);
ArrayList<String> params = new ArrayList<String>();
while (paramMatcher.find()) {
// We found some OOB text.
// assume only one OOB in the text?
String param = paramMatcher.group(1);
params.add(param);
}
OOBPayload payload = new OOBPayload(serviceName, methodName, params);
// log.info(payload.toString());
return payload;
// JAXB stuff blows up because the response from program ab is already
// xml decoded!
//
// JAXBContext jaxbContext;
// try {
// jaxbContext = JAXBContext.newInstance(OOBPayload.class);
// Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
// log.info("OOB PAYLOAD :" + oobPayload);
// Reader r = new StringReader(oobPayload);
// OOBPayload oobMsg = (OOBPayload) jaxbUnmarshaller.unmarshal(r);
// return oobMsg;
// } catch (JAXBException e) {
// // TODO Auto-generated catch block
// e.printStackTrace();
// }
// log.info("OOB tag found, but it's not an MRL tag. {}", oobPayload);
// return null;
}
private List<OOBPayload> processOOB(String text) {
// Find any oob tags
ArrayList<OOBPayload> payloads = new ArrayList<OOBPayload>();
Matcher oobMatcher = oobPattern.matcher(text);
while (oobMatcher.find()) {
// We found some OOB text.
// assume only one OOB in the text?
String oobPayload = oobMatcher.group(0);
Matcher mrlMatcher = mrlPattern.matcher(oobPayload);
while (mrlMatcher.find()) {
String mrlPayload = mrlMatcher.group(0);
OOBPayload payload = parseOOB(mrlPayload);
payloads.add(payload);
// TODO: maybe we dont' want this?
// Notifiy endpoints
invoke("publishOOBText", mrlPayload);
// grab service and invoke method.
ServiceInterface s = Runtime.getService(payload.getServiceName());
if (s == null) {
log.warn("Service name in OOB/MRL tag unknown. {}", mrlPayload);
return null;
}
// TODO: should you be able to be synchronous for this
// execution?
Object result = null;
if (payload.getParams() != null) {
result = s.invoke(payload.getMethodName(), payload.getParams().toArray());
} else {
result = s.invoke(payload.getMethodName());
}
log.info("OOB PROCESSING RESULT: {}", result);
}
}
if (payloads.size() > 0) {
return payloads;
} else {
return null;
}
}
/*
* If a response comes back that has an OOB Message, publish that separately
*/
public String publishOOBText(String oobText) {
return oobText;
}
/*
* publishing method of the pub sub pair - with addResponseListener allowing
* subscriptions pub/sub routines have the following pattern
*
* publishing routine -> publishX - must be invoked to provide data to
* subscribers subscription routine -> addXListener - simply adds a Service
* listener to the notify framework any service which subscribes must
* implement -> onX(data) - this is where the data will be sent (the
* call-back)
*
*/
public Response publishResponse(Response response) {
return response;
}
/*
* Test only publishing point - for simple consumers
*/
public String publishResponseText(Response response) {
return response.msg;
}
@Override
public String publishText(String text) {
return text;
}
public void reloadSession(String session, String botName) {
reloadSession(path, session, botName);
}
public void reloadSession(String path, String username, String botname) {
// kill the bot
bot = null;
// kill the session
String sessionKey = resolveSessionKey(username, botname);
if (sessions.containsKey(sessionKey)) {
// TODO: will garbage collection clean up the bot now ?
// Or are there other handles to it?
sessions.remove(sessionKey);
}
// TODO: we should make sure we keep the same path as before.
startSession(path, username, currentBotName);
}
/*
* Persist the predicates for all known sessions in the robot.
*
*/
public void savePredicates() throws IOException {
for (String session : sessions.keySet()) {
// TODO: better parsing of this.
String[] parts = session.split("-");
String username = parts[0];
String botname = parts[1];
String sessionPredicateFilename = createSessionPredicateFilename(username, botname);
File sessionPredFile = new File(sessionPredicateFilename);
Chat chat = getChat(username, botname);
// overwrite the original file , this should always be a full set.
log.info("Writing predicate file for session {}", session);
FileWriter predWriter = new FileWriter(sessionPredFile, false);
for (String predicate : chat.predicates.keySet()) {
String value = chat.predicates.get(predicate);
predWriter.write(predicate + ":" + value + "\n");
}
predWriter.close();
}
log.info("Done saving predicates.");
}
public void setEnableAutoConversation(boolean enableAutoConversation) {
sessions.get(resolveSessionKey(currentUserName, currentBotName)).enableAutoConversation = enableAutoConversation;
}
public void setMaxConversationDelay(int maxConversationDelay) {
sessions.get(resolveSessionKey(currentUserName, currentBotName)).maxConversationDelay = maxConversationDelay;
}
public void setProcessOOB(boolean processOOB) {
sessions.get(resolveSessionKey(currentUserName, currentBotName)).processOOB = processOOB;
}
public void startSession() {
startSession(null);
}
public void startSession(String session) {
startSession(session, currentBotName);
}
public Set<String> getSessionNames() {
return sessions.keySet();
}
/**
* Load the AIML 2.0 Bot config and start a chat session. This must be called
* after the service is created.
*
* @param session
* - The new session name
* @param botName
* - The name of the bot to load. (example: alice2)
*/
public void startSession(String session, String botName) {
startSession(path, session, botName);
}
public void startSession(String path, String userName, String botName) {
this.path = path;
info("starting Chat Session path:%s username:%s botname:%s", path, userName, botName);
this.currentBotName = botName;
this.currentUserName = userName;
// Session is between a user and a bot. key is compound.
String sessionKey = resolveSessionKey(userName, botName);
if (sessions.containsKey(sessionKey)) {
warn("session %s already created", sessionKey);
return;
}
cleanOutOfDateAimlIFFiles(botName);
wasCleanyShutdowned="nok";
// TODO: manage the bots in a collective pool/hash map.
if (bot == null) {
bot = new Bot(botName, path);
} else if (!botName.equalsIgnoreCase(bot.name)) {
bot = new Bot(botName, path);
}
Chat chat = new Chat(bot);
// for (Category c : bot.brain.getCategories()) {
// log.info(c.getPattern());
// }
//
// String resp = chat.multisentenceRespond("hello");
// load session specific predicates, these override the default ones.
String sessionPredicateFilename = createSessionPredicateFilename(userName, botName);
chat.predicates.getPredicateDefaults(sessionPredicateFilename);
//
sessions.put(resolveSessionKey(currentUserName, currentBotName), new ChatData(chat));
// lets test if the robot knows the name of the person in the session
String name = chat.predicates.get("name").trim();
// TODO: this implies that the default value for "name" is default
// "Friend"
if (name == null || "Friend".equalsIgnoreCase(name) || "unknown".equalsIgnoreCase(name)) {
// TODO: find another interface that's simpler to use for this
// create a string that represents the predicates file
String inputPredicateStream = "name:" + userName;
// load those predicates
chat.predicates.getPredicateDefaultsFromInputStream(FileIO.toInputStream(inputPredicateStream));
}
// this.currentBotName = botName;
// String userName = chat.predicates.get("name");
log.info("Started session for bot name:{} , username:{}", botName, userName);
// TODO: to make sure if the start session is updated, that the button
// updates in the gui ?
this.save();
broadcastState();
}
public void setPath(String path) {
this.path = path;
}
public void writeAIML() {
bot.writeAIMLFiles();
}
public void writeAIMLIF() {
bot.writeAIMLIFFiles();
}
public void writeAndQuit() {
bot.writeQuit();
// edit moz4r : we need to change the last modification date to aimlif folder because at this time all is compilated.
// so programAb don't need to load AIML at startup
sleep(1000);
File folder = new File(bot.aimlif_path);
for (File f : folder.listFiles()) {
f.setLastModified(System.currentTimeMillis());
}
String fil=bot.aimlif_path+File.separator+"folder_updated";
File file = new File(fil);
file.delete();
try{
PrintWriter writer = new PrintWriter(fil, "UTF-8");
writer.println("");
writer.close();
} catch (IOException e) {
log.error("PrintWriter error");
}
}
@Override
public void stopService() {
try {
savePredicates();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
writeAndQuit();
wasCleanyShutdowned="ok";
super.stopService();
}
/**
* This static method returns all the details of the class without it having
* to be constructed. It has description, categories, dependencies, and peer
* definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(ProgramAB.class.getCanonicalName());
meta.addDescription("AIML 2.0 Reference interpreter based on Program AB");
meta.addCategory("intelligence");
// meta.addDependency("org.alicebot.ab", "0.0.6.26");
meta.addDependency("org.alicebot.ab", "0.0.4.1-kw");
meta.addDependency("org.json", "20090211");
return meta;
}
public static void main(String s[]) throws IOException {
LoggingFactory.init("INFO");
// Runtime.createAndStart("gui", "SwingGui");
Runtime.start("webgui", "WebGui");
ProgramAB ai = (ProgramAB) Runtime.createAndStart("ai", "ProgramAB");
//ai.setPath(System.getProperty("user.dir")+File.separator+"ProgramAB"+File.separator);
ai.startSession("default", "alice2");
log.info(ai.getResponse("hi there").toString());
log.info(ai.getResponse("こんにちは").toString());
// ai.savePredicates();
}
}
| src/org/myrobotlab/service/ProgramAB.java | package org.myrobotlab.service;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.alicebot.ab.AIMLMap;
import org.alicebot.ab.AIMLSet;
import org.alicebot.ab.Bot;
import org.alicebot.ab.Category;
import org.alicebot.ab.Chat;
import org.alicebot.ab.Predicates;
import org.myrobotlab.framework.Service;
import org.myrobotlab.framework.ServiceType;
import org.myrobotlab.framework.interfaces.ServiceInterface;
import org.myrobotlab.io.FileIO;
import org.myrobotlab.logging.LoggerFactory;
import org.myrobotlab.logging.LoggingFactory;
import org.myrobotlab.programab.ChatData;
import org.myrobotlab.programab.OOBPayload;
import org.myrobotlab.service.interfaces.TextListener;
import org.myrobotlab.service.interfaces.TextPublisher;
import org.slf4j.Logger;
/**
* Program AB service for MyRobotLab Uses AIML 2.0 to create a ChatBot This is a
* reboot of the Old AIML spec to be more 21st century.
*
* More Info at http://aitools.org/ProgramAB
*
* @author kwatters
*
*/
public class ProgramAB extends Service implements TextListener, TextPublisher {
transient public final static Logger log = LoggerFactory.getLogger(ProgramAB.class);
public static class Response {
public String session;
public String msg;
public List<OOBPayload> payloads;
// FIXME - timestamps are usually longs System.currentTimeMillis()
public Date timestamp;
public Response(String session, String msg, List<OOBPayload> payloads, Date timestamp) {
this.session = session;
this.msg = msg;
this.payloads = payloads;
this.timestamp = timestamp;
}
public String toString() {
return String.format("%d %s %s", timestamp.getTime(), session, msg);
}
}
transient Bot bot = null;
HashSet<String> bots = new HashSet<String>();
String path = "ProgramAB";
/**
* botName - is un-initialized to preserve serialization stickyness
*/
// String botName;
// This is the username that is chatting with the bot.
// String currentSession = "default";
// Session is a user and a bot. so the key to the session should be the
// username, and the bot name.
HashMap<String, ChatData> sessions = new HashMap<String, ChatData>();
// TODO: better parsing than a regex...
transient Pattern oobPattern = Pattern.compile("<oob>.*?</oob>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
transient Pattern mrlPattern = Pattern.compile("<mrl>.*?</mrl>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
// a guaranteed bot we have
String currentBotName = "alice2";
// this is the username that is chatting with the bot.
String currentUserName = "default";
static final long serialVersionUID = 1L;
static int savePredicatesInterval = 60 * 1000 * 5; // every 5 minutes
public String wasCleanyShutdowned;
public ProgramAB(String name) {
super(name);
// Tell programAB to persist it's learned predicates about people
// every 30 seconds.
addTask("savePredicates", savePredicatesInterval, 0, "savePredicates");
// TODO: Lazy load this!
// look for local bots defined
File programAbDir = new File(String.format("%s/bots", path));
if (!programAbDir.exists() || !programAbDir.isDirectory()) {
log.info("%s does not exist !!!");
} else {
File[] listOfFiles = programAbDir.listFiles();
for (int i = 0; i < listOfFiles.length; i++) {
if (listOfFiles[i].isFile()) {
// System.out.println("File " + listOfFiles[i].getName());
} else if (listOfFiles[i].isDirectory()) {
bots.add(listOfFiles[i].getName());
}
}
}
}
public void addOOBTextListener(TextListener service) {
addListener("publishOOBText", service.getName(), "onOOBText");
}
public void addResponseListener(Service service) {
addListener("publishResponse", service.getName(), "onResponse");
}
public void addTextListener(TextListener service) {
addListener("publishText", service.getName(), "onText");
}
public void addTextPublisher(TextPublisher service) {
addListener("publishText", service.getName(), "onText");
}
/**
 * Deletes stale AIMLIF (csv) cache files for the given bot so ProgramAB rebuilds
 * them from the AIML sources on the next load. Also wipes the whole AIMLIF cache
 * after an unclean shutdown, and touches a "folder_updated" marker file whenever
 * a cache entry was invalidated.
 *
 * @param botName the bot whose aiml/aimlif directories are checked
 */
private void cleanOutOfDateAimlIFFiles(String botName) {
    String aimlPath = path + File.separator + "bots" + File.separator + botName + File.separator + "aiml";
    String aimlIFPath = path + File.separator + "bots" + File.separator + botName + File.separator + "aimlif";
    log.info("AIML FILES:");
    File folder = new File(aimlPath);
    File folderaimlIF = new File(aimlIFPath);
    if (!folder.exists()) {
        // No AIML sources: nothing to compare against.
        log.info("{} does not exist", aimlPath);
        return;
    }
    // wasCleanyShutdowned: null/"" = very first start, "ok" = clean, "nok" = crashed.
    if (wasCleanyShutdowned == null || wasCleanyShutdowned.isEmpty())
    {
        wasCleanyShutdowned = "firstStart";
    }
    if (wasCleanyShutdowned.equals("nok"))
    {
        // Unclean shutdown: the AIMLIF cache may be inconsistent, drop it entirely.
        if (folderaimlIF.exists()) {
            warn("Bad previous shutdown, ProgramAB need to recompile AimlIf files. Don't worry.");
            for (File f : folderaimlIF.listFiles()) {
                f.delete();
            }
        }
    }
    log.info(folder.getAbsolutePath());
    // Map of aiml file base name -> last-modified timestamp of the AIML source.
    HashMap<String, Long> modifiedDates = new HashMap<String, Long>();
    for (File f : folder.listFiles()) {
        log.info(f.getAbsolutePath());
        // TODO: better stripping of the file extension
        String aiml = f.getName().replace(".aiml", "");
        modifiedDates.put(aiml, f.lastModified());
    }
    log.info("AIMLIF FILES:");
    folderaimlIF = new File(aimlIFPath);
    if (!folderaimlIF.exists()) {
        // First run for this bot: create the cache directory and let ProgramAB fill it.
        log.info("aimlif directory missing,creating it. " + folderaimlIF.getAbsolutePath());
        folderaimlIF.mkdirs();
        return;
    }
    for (File f : folderaimlIF.listFiles()) {
        log.info(f.getAbsolutePath());
        // TODO: better stripping of the file extension
        String aimlIF = f.getName().replace(".aiml.csv", "");
        Long lastMod = modifiedDates.get(aimlIF);
        if (lastMod != null) {
            if (f.lastModified() < lastMod) {
                // The AIMLIF cache file is OLDER than its AIML source (the source was
                // edited since the cache was built): delete the cache entry so
                // ProgramAB recompiles it properly.
                log.info("Deleteing AIMLIF file because the original AIML file was modified. {}", aimlIF);
                f.delete();
                // edit moz4r : we need to change the last modification date to aiml folder for recompilation
                sleep(1000);
                // Touch the "folder_updated" marker, recording the source timestamp.
                String fil = aimlPath + File.separator + "folder_updated";
                File file = new File(fil);
                file.delete();
                try {
                    PrintWriter writer = new PrintWriter(fil, "UTF-8");
                    writer.println(lastMod.toString());
                    writer.close();
                } catch (IOException e) {
                    // do something
                }
            }
        }
    }
}
private String createSessionPredicateFilename(String username, String botName) {
// TODO: sanitize the session label so it can be safely used as a filename
String predicatePath = path + File.separator + "bots" + File.separator + botName + File.separator + "config";
// just in case the directory doesn't exist.. make it.
File predDir = new File(predicatePath);
if (!predDir.exists()) {
predDir.mkdirs();
}
predicatePath += File.separator + username + ".predicates.txt";
return predicatePath;
}
public int getMaxConversationDelay() {
return sessions.get(resolveSessionKey(currentUserName, currentBotName)).maxConversationDelay;
}
public Response getResponse(String text) {
return getResponse(currentUserName, text);
}
/**
*
* @param text
* - the query string to the bot brain
* @param username
* - the user that is sending the query
* @param botName
* - the name of the bot you which to get the response from
* @return the response for a user from a bot given the input text.
*/
public Response getResponse(String username, String botName, String text) {
this.currentBotName = botName;
return getResponse(username, text);
}
public Response getResponse(String username, String text) {
log.info(String.format("Get Response for : user %s bot %s : %s", username, currentBotName, text));
if (bot == null) {
String error = "ERROR: Core not loaded, please load core before chatting.";
error(error);
return new Response(username, error, null, new Date());
}
String sessionKey = resolveSessionKey(username, currentBotName);
if (!sessions.containsKey(sessionKey)) {
startSession(path, username, currentBotName);
}
ChatData chatData = sessions.get(sessionKey);
String res = getChat(username, currentBotName).multisentenceRespond(text);
// grab and update the time when this response came in.
chatData.lastResponseTime = new Date();
// Check the AIML response to see if there is OOB (out of band data)
// If so, publish that data independent of the text response.
List<OOBPayload> payloads = null;
if (chatData.processOOB) {
payloads = processOOB(res);
}
// OOB text should not be published as part of the response text.
Matcher matcher = oobPattern.matcher(res);
res = matcher.replaceAll("").trim();
Response response = new Response(username, res, payloads, chatData.lastResponseTime);
// Now that we've said something, lets create a timer task to wait for N
// seconds
// and if nothing has been said.. try say something else.
// TODO: trigger a task to respond with something again
// if the humans get bored
if (chatData.enableAutoConversation) {
// schedule one future reply. (always get the last word in..)
// int numExecutions = 1;
// TODO: we need a way for the task to just execute one time
// it'd be good to have access to the timer here, but it's transient
addTask("getResponse", chatData.maxConversationDelay, 0, "getResponse", username, text);
}
// EEK! clean up the API!
invoke("publishResponse", response);
invoke("publishResponseText", response);
invoke("publishText", response.msg);
info("to: %s - %s", username, res);
// if (log.isDebugEnabled()) {
// for (String key : sessions.get(session).predicates.keySet()) {
// log.debug(session + " " + key + " " +
// sessions.get(session).predicates.get(key));
// }
// }
// TODO: wire this in so the gui updates properly. ??
// broadcastState();
return response;
}
/**
 * Builds the compound key used to index {@code sessions}: {@code "<username>-<botname>"}.
 *
 * @param username the chatting user
 * @param botname  the bot the user is talking to
 * @return the session map key
 */
public String resolveSessionKey(String username, String botname) {
    StringBuilder key = new StringBuilder(username);
    key.append('-').append(botname);
    return key.toString();
}
public void repetition_count(int val)
{
org.alicebot.ab.MagicNumbers.repetition_count=val;
}
/**
 * Looks up the Chat object for a user/bot pair.
 *
 * @param userName the chatting user
 * @param botName  the bot name
 * @return the Chat for the session, or {@code null} (after logging an error)
 *         when no such session has been started
 */
public Chat getChat(String userName, String botName) {
    String sessionKey = resolveSessionKey(userName, botName);
    if (sessions.containsKey(sessionKey)) {
        return sessions.get(sessionKey).chat;
    }
    error("%s session does not exist", sessionKey);
    return null;
}
public void removePredicate(String userName, String predicateName) {
removePredicate(userName, currentBotName, predicateName);
}
public void removePredicate(String userName, String botName, String predicateName) {
Predicates preds = getChat(userName, botName).predicates;
preds.remove(predicateName);
}
/**
 * Adds a value to one of the bot's AIML sets and persists the set to disk.
 * If the named set does not exist yet, a new one is created.
 *
 * @param setName  the AIML set to update
 * @param setValue the value to add (normalized to upper case, trimmed)
 */
public void addToSet(String setName, String setValue) {
    String normalized = setValue.toUpperCase().trim();
    AIMLSet targetSet = bot.setMap.get(setName);
    if (targetSet == null) {
        // TODO: should we create a new set ? or just log this warning?
        log.info("Unknown AIML set: {}. A new set will be created. ", setName);
        targetSet = new AIMLSet(setName, bot);
    }
    targetSet.add(normalized);
    // persist to disk.
    targetSet.writeAIMLSet();
}
public void addToMap(String mapName, String mapKey, String mapValue) {
// add an entry to the map.
AIMLMap updateMap = bot.mapMap.get(mapName);
mapKey = mapKey.toUpperCase().trim();
if (updateMap != null) {
updateMap.put(mapKey, mapValue);
// persist to disk!
updateMap.writeAIMLMap();
} else {
log.info("Unknown AIML map: {}. A new MAP will be created. ", mapName);
// dynamically create new maps?!
AIMLMap newMap = new AIMLMap(mapName, bot);
newMap.put(mapKey, mapValue);
newMap.writeAIMLMap();
}
}
public void setPredicate(String username, String predicateName, String predicateValue) {
Predicates preds = getChat(username, currentBotName).predicates;
preds.put(predicateName, predicateValue);
}
public void unsetPredicate(String username, String predicateName) {
Predicates preds = getChat(username, currentBotName).predicates;
preds.remove(predicateName);
}
public String getPredicate(String username, String predicateName) {
Predicates preds = getChat(username, currentBotName).predicates;
return preds.get(predicateName);
}
/**
 * Only respond if the last response was longer than delay ms ago
 *
 * @param session
 *          - current session/username
 * @param text
 *          - text to get a response for
 * @param delay
 *          - min amount of time that must have transpired since the last
 *          response.
 * @return the response, or {@code null} when the bot answered too recently
 */
public Response getResponse(String session, String text, Long delay) {
    // Fixed: sessions are keyed by "<user>-<bot>" (see resolveSessionKey); the
    // original looked the ChatData up with the bare username and NPE'd below.
    ChatData chatData = sessions.get(resolveSessionKey(session, currentBotName));
    if (chatData == null || chatData.lastResponseTime == null) {
        // No session yet / nothing said yet: treat as "long enough ago" and respond.
        return getResponse(session, text);
    }
    long delta = System.currentTimeMillis() - chatData.lastResponseTime.getTime();
    if (delta > delay) {
        return getResponse(session, text);
    } else {
        return null;
    }
}
public boolean isEnableAutoConversation() {
return sessions.get(resolveSessionKey(currentUserName, currentBotName)).enableAutoConversation;
}
public boolean isProcessOOB() {
return sessions.get(resolveSessionKey(currentUserName, currentBotName)).processOOB;
}
/**
 * Return a list of all patterns that the AIML Bot knows to match against.
 *
 * NOTE(review): the {@code botName} parameter is currently ignored — patterns are
 * read from the currently loaded bot. Kept as-is for interface compatibility;
 * confirm whether per-bot lookup was intended.
 *
 * @param botName the bots name from which to return it's patterns.
 * @return a list of all patterns loaded into the aiml brain; empty when no bot
 *         has been loaded yet
 */
public ArrayList<String> listPatterns(String botName) {
    ArrayList<String> patterns = new ArrayList<String>();
    if (bot == null || bot.brain == null) {
        // Fixed: avoid an NPE when called before startSession() loaded a bot.
        return patterns;
    }
    for (Category c : bot.brain.getCategories()) {
        patterns.add(c.getPattern());
    }
    return patterns;
}
/**
 * Return the number of milliseconds since the last response was given -1 if a
 * response has never been given.
 *
 * @return milliseconds, or -1 when no response has been given (or no session
 *         has been started yet)
 */
public long millisecondsSinceLastResponse() {
    ChatData chatData = sessions.get(resolveSessionKey(currentUserName, currentBotName));
    // Fixed: also guard against a missing session; the original NPE'd when
    // called before startSession().
    if (chatData == null || chatData.lastResponseTime == null) {
        return -1;
    }
    return System.currentTimeMillis() - chatData.lastResponseTime.getTime();
}
@Override
public void onText(String text) {
// What else should we do here? seems reasonable to just do this.
// this should actually call getResponse
// on input, get the proper response
// Response resp = getResponse(text);
getResponse(text);
// push that to the next end point.
// invoke("publishText", resp.msg);
}
/**
 * Parses one &lt;mrl&gt; payload out of an OOB (out-of-band) snippet into an
 * OOBPayload of service name, method name and parameter strings.
 *
 * NOTE(review): the results of serviceMatcher.find()/methodMatcher.find() are
 * not checked — a payload without a &lt;service&gt; or &lt;method&gt; tag makes
 * group(1) throw IllegalStateException. Also, the three Patterns are recompiled
 * on every call; they could be cached as static finals. Left unchanged here.
 *
 * @param oobPayload the raw &lt;mrl&gt;...&lt;/mrl&gt; text
 * @return the parsed payload
 */
private OOBPayload parseOOB(String oobPayload) {
    // TODO: fix the damn double encoding issue.
    // we have user entered text in the service/method
    // and params values.
    // grab the service
    Pattern servicePattern = Pattern.compile("<service>(.*?)</service>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
    Matcher serviceMatcher = servicePattern.matcher(oobPayload);
    serviceMatcher.find();
    String serviceName = serviceMatcher.group(1);
    Pattern methodPattern = Pattern.compile("<method>(.*?)</method>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
    Matcher methodMatcher = methodPattern.matcher(oobPayload);
    methodMatcher.find();
    String methodName = methodMatcher.group(1);
    // Collect every <param> occurrence, in order, as the call arguments.
    Pattern paramPattern = Pattern.compile("<param>(.*?)</param>", Pattern.CASE_INSENSITIVE | Pattern.DOTALL | Pattern.MULTILINE);
    Matcher paramMatcher = paramPattern.matcher(oobPayload);
    ArrayList<String> params = new ArrayList<String>();
    while (paramMatcher.find()) {
        // We found some OOB text.
        // assume only one OOB in the text?
        String param = paramMatcher.group(1);
        params.add(param);
    }
    OOBPayload payload = new OOBPayload(serviceName, methodName, params);
    // log.info(payload.toString());
    return payload;
    // JAXB stuff blows up because the response from program ab is already
    // xml decoded!
    //
    // JAXBContext jaxbContext;
    // try {
    // jaxbContext = JAXBContext.newInstance(OOBPayload.class);
    // Unmarshaller jaxbUnmarshaller = jaxbContext.createUnmarshaller();
    // log.info("OOB PAYLOAD :" + oobPayload);
    // Reader r = new StringReader(oobPayload);
    // OOBPayload oobMsg = (OOBPayload) jaxbUnmarshaller.unmarshal(r);
    // return oobMsg;
    // } catch (JAXBException e) {
    // // TODO Auto-generated catch block
    // e.printStackTrace();
    // }
    // log.info("OOB tag found, but it's not an MRL tag. {}", oobPayload);
    // return null;
}
private List<OOBPayload> processOOB(String text) {
// Find any oob tags
ArrayList<OOBPayload> payloads = new ArrayList<OOBPayload>();
Matcher oobMatcher = oobPattern.matcher(text);
while (oobMatcher.find()) {
// We found some OOB text.
// assume only one OOB in the text?
String oobPayload = oobMatcher.group(0);
Matcher mrlMatcher = mrlPattern.matcher(oobPayload);
while (mrlMatcher.find()) {
String mrlPayload = mrlMatcher.group(0);
OOBPayload payload = parseOOB(mrlPayload);
payloads.add(payload);
// TODO: maybe we dont' want this?
// Notifiy endpoints
invoke("publishOOBText", mrlPayload);
// grab service and invoke method.
ServiceInterface s = Runtime.getService(payload.getServiceName());
if (s == null) {
log.warn("Service name in OOB/MRL tag unknown. {}", mrlPayload);
return null;
}
// TODO: should you be able to be synchronous for this
// execution?
Object result = null;
if (payload.getParams() != null) {
result = s.invoke(payload.getMethodName(), payload.getParams().toArray());
} else {
result = s.invoke(payload.getMethodName());
}
log.info("OOB PROCESSING RESULT: {}", result);
}
}
if (payloads.size() > 0) {
return payloads;
} else {
return null;
}
}
/*
* If a response comes back that has an OOB Message, publish that separately
*/
public String publishOOBText(String oobText) {
return oobText;
}
/*
* publishing method of the pub sub pair - with addResponseListener allowing
* subscriptions pub/sub routines have the following pattern
*
* publishing routine -> publishX - must be invoked to provide data to
* subscribers subscription routine -> addXListener - simply adds a Service
* listener to the notify framework any service which subscribes must
* implement -> onX(data) - this is where the data will be sent (the
* call-back)
*
*/
public Response publishResponse(Response response) {
return response;
}
/*
* Test only publishing point - for simple consumers
*/
public String publishResponseText(Response response) {
return response.msg;
}
@Override
public String publishText(String text) {
return text;
}
/**
 * Reloads a session using the service's current bot path.
 *
 * @param session the username of the session
 * @param botName the bot name of the session
 */
public void reloadSession(String session, String botName) {
    reloadSession(path, session, botName);
}

/**
 * Drops the cached bot and the user's session, then starts the session again so
 * the bot's AIML is re-read from disk.
 *
 * @param path     the ProgramAB root path
 * @param username the username of the session
 * @param botname  the bot name of the session
 */
public void reloadSession(String path, String username, String botname) {
    // kill the bot
    bot = null;
    // kill the session
    String sessionKey = resolveSessionKey(username, botname);
    if (sessions.containsKey(sessionKey)) {
        // TODO: will garbage collection clean up the bot now ?
        // Or are there other handles to it?
        sessions.remove(sessionKey);
    }
    // Fixed: the original restarted with currentBotName, silently ignoring the
    // botname argument that was just used to remove the session.
    startSession(path, username, botname);
}
/**
 * Persist the predicates for all known sessions in the robot. Each session's
 * predicates are written (full overwrite) to its own file under the bot's
 * config directory.
 *
 * @throws IOException if a predicate file cannot be written
 */
public void savePredicates() throws IOException {
    for (String session : sessions.keySet()) {
        // Session keys are "<username>-<botname>" (see resolveSessionKey).
        // NOTE(review): this parsing breaks if the username contains '-' —
        // confirm usernames are dash-free, or store structured keys instead.
        String[] parts = session.split("-");
        String username = parts[0];
        String botname = parts[1];
        String sessionPredicateFilename = createSessionPredicateFilename(username, botname);
        File sessionPredFile = new File(sessionPredicateFilename);
        Chat chat = getChat(username, botname);
        // overwrite the original file , this should always be a full set.
        log.info("Writing predicate file for session {}", session);
        // Fixed: try-with-resources — the original leaked the FileWriter when a
        // write threw.
        try (FileWriter predWriter = new FileWriter(sessionPredFile, false)) {
            for (String predicate : chat.predicates.keySet()) {
                String value = chat.predicates.get(predicate);
                predWriter.write(predicate + ":" + value + "\n");
            }
        }
    }
    log.info("Done saving predicates.");
}
public void setEnableAutoConversation(boolean enableAutoConversation) {
sessions.get(resolveSessionKey(currentUserName, currentBotName)).enableAutoConversation = enableAutoConversation;
}
public void setMaxConversationDelay(int maxConversationDelay) {
sessions.get(resolveSessionKey(currentUserName, currentBotName)).maxConversationDelay = maxConversationDelay;
}
public void setProcessOOB(boolean processOOB) {
sessions.get(resolveSessionKey(currentUserName, currentBotName)).processOOB = processOOB;
}
public void startSession() {
startSession(null);
}
public void startSession(String session) {
startSession(session, currentBotName);
}
public Set<String> getSessionNames() {
return sessions.keySet();
}
/**
* Load the AIML 2.0 Bot config and start a chat session. This must be called
* after the service is created.
*
* @param session
* - The new session name
* @param botName
* - The name of the bot to load. (example: alice2)
*/
public void startSession(String session, String botName) {
startSession(path, session, botName);
}
/**
 * Loads the AIML 2.0 bot (if needed) and starts a chat session between
 * {@code userName} and {@code botName}, loading any previously saved
 * per-session predicates. No-op when the session already exists.
 *
 * @param path     the ProgramAB root path (contains the "bots" directory)
 * @param userName the user that will be chatting
 * @param botName  the bot to load (example: alice2)
 */
public void startSession(String path, String userName, String botName) {
    this.path = path;
    info("starting Chat Session path:%s username:%s botname:%s", path, userName, botName);
    this.currentBotName = botName;
    this.currentUserName = userName;
    // Session is between a user and a bot. key is compound.
    String sessionKey = resolveSessionKey(userName, botName);
    if (sessions.containsKey(sessionKey)) {
        warn("session %s already created", sessionKey);
        return;
    }
    cleanOutOfDateAimlIFFiles(botName);
    // Pessimistically mark the shutdown state; stopService() flips it to "ok".
    wasCleanyShutdowned = "nok";
    // TODO: manage the bots in a collective pool/hash map.
    // Only (re)load the bot when none is loaded or a different one is requested.
    if (bot == null || !botName.equalsIgnoreCase(bot.name)) {
        bot = new Bot(botName, path);
    }
    Chat chat = new Chat(bot);
    // load session specific predicates, these override the default ones.
    String sessionPredicateFilename = createSessionPredicateFilename(userName, botName);
    chat.predicates.getPredicateDefaults(sessionPredicateFilename);
    sessions.put(sessionKey, new ChatData(chat));
    // lets test if the robot knows the name of the person in the session.
    // Fixed: predicates.get("name") can return null; the original called trim()
    // before the null check and could NPE.
    String name = chat.predicates.get("name");
    if (name != null) {
        name = name.trim();
    }
    // TODO: this implies that the default value for "name" is "Friend"
    if (name == null || "Friend".equalsIgnoreCase(name) || "unknown".equalsIgnoreCase(name)) {
        // TODO: find another interface that's simpler to use for this
        // create a string that represents the predicates file
        String inputPredicateStream = "name:" + userName;
        // load those predicates
        chat.predicates.getPredicateDefaultsFromInputStream(FileIO.toInputStream(inputPredicateStream));
    }
    log.info("Started session for bot name:{} , username:{}", botName, userName);
    // TODO: to make sure if the start session is updated, that the button
    // updates in the gui ?
    this.save();
    broadcastState();
}
public void setPath(String path) {
this.path = path;
}
public void writeAIML() {
bot.writeAIMLFiles();
}
public void writeAIMLIF() {
bot.writeAIMLIFFiles();
}
/**
 * Flushes the bot's state to disk, bumps the AIMLIF cache timestamps so the
 * cache is considered fresh at the next startup, and touches the
 * "folder_updated" marker file.
 */
public void writeAndQuit() {
    bot.writeQuit();
    // edit moz4r : we need to change the last modification date to aimlif folder because at this time all is compilated.
    // so programAb don't need to load AIML at startup
    sleep(1000);
    File folder = new File(bot.aimlif_path);
    File[] aimlifFiles = folder.listFiles();
    if (aimlifFiles != null) { // Fixed: listFiles() returns null when the directory is missing
        for (File f : aimlifFiles) {
            f.setLastModified(System.currentTimeMillis());
        }
    }
    String fil = bot.aimlif_path + File.separator + "folder_updated";
    File file = new File(fil);
    file.delete();
    // try-with-resources closes the writer even if println throws.
    try (PrintWriter writer = new PrintWriter(fil, "UTF-8")) {
        writer.println("");
    } catch (IOException e) {
        // Fixed: include the cause; the original logged only a fixed message.
        log.error("PrintWriter error", e);
    }
}
@Override
public void stopService() {
    // Persist learned predicates before shutting down so user state survives restarts.
    try {
        savePredicates();
    } catch (IOException e) {
        // Fixed: log with the full stack trace instead of printStackTrace(), so the
        // failure lands in the service log rather than stderr and is not lost.
        log.error("failed to save predicates on shutdown", e);
    }
    // Flush AIML/AIMLIF state to disk, then mark the shutdown as clean so the next
    // startup can skip the AIMLIF recompilation (see cleanOutOfDateAimlIFFiles).
    writeAndQuit();
    wasCleanyShutdowned = "ok";
    super.stopService();
}
/**
* This static method returns all the details of the class without it having
* to be constructed. It has description, categories, dependencies, and peer
* definitions.
*
* @return ServiceType - returns all the data
*
*/
static public ServiceType getMetaData() {
ServiceType meta = new ServiceType(ProgramAB.class.getCanonicalName());
meta.addDescription("AIML 2.0 Reference interpreter based on Program AB");
meta.addCategory("intelligence");
// meta.addDependency("org.alicebot.ab", "0.0.6.26");
meta.addDependency("org.alicebot.ab", "0.0.4.1-kw");
meta.addDependency("org.json", "20090211");
return meta;
}
public static void main(String s[]) throws IOException {
LoggingFactory.init("INFO");
// Runtime.createAndStart("gui", "SwingGui");
Runtime.start("webgui", "WebGui");
ProgramAB ai = (ProgramAB) Runtime.createAndStart("ai", "ProgramAB");
//ai.setPath(System.getProperty("user.dir")+File.separator+"ProgramAB"+File.separator);
ai.startSession("default", "alice2");
log.info(ai.getResponse("hi there").toString());
log.info(ai.getResponse("こんにちは").toString());
// ai.savePredicates();
}
}
| removing that unnecessary warning, a little code cleanup
| src/org/myrobotlab/service/ProgramAB.java | removing that unnecessary warning, a little code cleanup |
|
Java | apache-2.0 | 19675cdf1ed905e2e77b95b57ece9d48b80d8aea | 0 | mneri/csv | /*
* Copyright 2018 Massimo Neri <[email protected]>
*
* This file is part of mneri/csv.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.mneri.csv;
import java.io.*;
import java.nio.charset.Charset;
import java.util.NoSuchElementException;
/**
* Read csv streams and automatically transform lines into Java objects.
*
* @param <T> The type of the Java objects to read.
* @author Massimo Neri <<a href="mailto:[email protected]">[email protected]</a>>
*/
public final class CsvReader<T> implements Closeable {
//@formatter:off
private static final int SOL = 0; // Start of line
private static final int SOF = 6; // Start of field
private static final int QOT = 12; // Quotation
private static final int ESC = 18; // Escape
private static final int TXT = 24; // Text
private static final int CAR = 30; // Carriage return
private static final int EOL = 36; // End of line
private static final int EOF = 42; // End of file
private static final int ERR = 48; // Error
private static final int STATE_MASK = 63;
private static final int NOP = 0; // No operation
private static final int APP = 1 << 6; // Append
private static final int MKF = 1 << 7; // Make field
private static final int MKL = 1 << 8; // Make line
private static final int[] TRANSACT = {
// * " , \r \n EOF
TXT | APP , QOT | NOP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | NOP , // SOL
TXT | APP , QOT | NOP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | MKF | MKL, // SOF
QOT | APP , ESC | NOP , QOT | APP , QOT | APP , QOT | APP , ERR | NOP , // QOT
ERR | NOP , QOT | APP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | MKF | MKL, // ESC
TXT | APP , TXT | APP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | MKF | MKL, // TXT
ERR | NOP , ERR | NOP , ERR | NOP , ERR | NOP , EOL | MKF | MKL, ERR | NOP }; // CAR
//@formatter:on
//@formatter:off
private static final int ELEMENT_NOT_READ = 0;
private static final int ELEMENT_READ = 1;
private static final int NO_SUCH_ELEMENT = 2;
private static final int CLOSED = 3;
//@formatter:on
private static final int DEFAULT_BUFFER_SIZE = 8192;
private final char[] buffer;
private final char delimiter;
private final CsvDeserializer<T> deserializer;
private final RecyclableCsvLine line;
private int lines;
private int next;
private final char quotation;
private final Reader reader;
private int size;
private int state = ELEMENT_NOT_READ;
/**
 * Creates a reader over the given {@link Reader}, using the delimiter and
 * quotation characters taken from {@code options}; parsed lines are turned
 * into objects of type {@code T} by {@code deserializer}.
 * Instances are obtained through the static {@code open(...)} factories.
 */
private CsvReader(Reader reader, CsvOptions options, CsvDeserializer<T> deserializer) {
    this.reader = reader;
    this.deserializer = deserializer;
    buffer = new char[DEFAULT_BUFFER_SIZE];
    line = new RecyclableCsvLine();
    delimiter = options.getDelimiter();
    quotation = options.getQuotation();
}
/**
 * Guard invoked by read operations before touching the underlying stream.
 *
 * @throws IllegalStateException if {@link #close()} has already been called.
 */
private void checkClosedState() {
    if (state == CLOSED) {
        throw new IllegalStateException("The reader is closed.");
    }
}
/**
* Closes the stream and releases any system resources associated with it. Once the stream has been closed, further
* {@link CsvReader#hasNext()}, {@link CsvReader#next()} and {@link CsvReader#skip(int)} invocations will throw an
* {@link IOException}. Closing a previously closed stream has no effect.
*
* @throws IOException if an I/O error occurs.
*/
@Override
public void close() throws IOException {
if (state == CLOSED) {
return;
}
state = CLOSED;
reader.close();
}
/**
 * Maps a character (or -1 for end of stream) to its column index in the
 * TRANSACT transition table. Columns: 0 = any other character, 1 = quotation,
 * 2 = delimiter, 3 = '\r', 4 = '\n', 5 = EOF.
 */
private int columnOf(int charCode) {
    if (charCode == '\r') {
        return 3;
    }
    if (charCode == '\n') {
        return 4;
    }
    if (charCode == -1) { // end of stream
        return 5;
    }
    // Delimiter/quotation are checked last, matching the original default branch.
    if (charCode == delimiter) {
        return 2;
    }
    if (charCode == quotation) {
        return 1;
    }
    return 0;
}
/**
 * Returns {@code true} if the reader has more elements. (In other words, returns {@code true} if
 * {@link CsvReader#next()} would return an element rather than throwing an exception).
 * <p>
 * Implementation: runs the table-driven state machine ({@code TRANSACT}) one character at a
 * time, appending characters and marking field/line boundaries on the shared {@code line}
 * buffer, until a full csv line is parsed, EOF is reached, or an illegal character is found.
 *
 * @return {@code true} if the reader has more elements.
 * @throws CsvException if the csv is not properly formatted.
 * @throws IOException if an I/O error occurs.
 */
public boolean hasNext() throws CsvException, IOException {
    //@formatter:off
    if      (state == ELEMENT_READ)    { return true;  } // a line is already buffered from a previous call
    else if (state == NO_SUCH_ELEMENT) { return false; } // EOF was already reached
    //@formatter:on
    checkClosedState();
    int row = SOL; // current state: start of line
    int nextChar;
    do {
        nextChar = read();
        int column = columnOf(nextChar);
        // transact packs the next state (STATE_MASK bits) and action flags (APP/MKF/MKL).
        int transact = TRANSACT[row + column];
        if ((transact & APP) != 0) {
            // Append the character to the current field.
            line.append(nextChar);
        } else if ((transact & MKF) != 0) {
            // Field boundary reached.
            line.markField();
            if ((transact & MKL) != 0) {
                // Line boundary reached: a full element is now available.
                lines++;
                state = ELEMENT_READ;
                return true;
            }
        }
        row = transact & STATE_MASK; // advance to the next state
    } while (row < EOF);
    if (row == EOF) {
        state = NO_SUCH_ELEMENT;
        return false;
    }
    // row == ERR: the state machine hit a character that is illegal in this position.
    throw new UnexpectedCharacterException(lines, nextChar);
}
/**
 * Return the next element in the reader.
 *
 * @return The next element.
 * @throws CsvException if the csv is not properly formatted.
 * @throws IOException if an I/O error occurs.
 * @throws NoSuchElementException if there are no more elements.
 */
public T next() throws CsvException, IOException {
    // hasNext() parses the next line into the shared buffer (if not already done).
    if (!hasNext()) {
        throw new NoSuchElementException();
    }
    try {
        T element = deserializer.deserialize(line);
        // Reset so the following hasNext() call parses a fresh line; the buffered
        // line is recycled rather than reallocated.
        state = ELEMENT_NOT_READ;
        line.clear();
        return element;
    } catch (Exception e) {
        // Deserializer failures are wrapped with the offending line for context.
        throw new CsvConversionException(line, e);
    }
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the default JVM charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, CsvDeserializer<T> deserializer) throws IOException {
return open(file, CsvOptions.defaultOptions(), deserializer);
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the default JVM charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param options reading options.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, CsvOptions options, CsvDeserializer<T> deserializer)
throws IOException {
return open(file, TextUtil.defaultCharset(), options, deserializer);
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the specified charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param charset the charset of the file.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, Charset charset, CsvDeserializer<T> deserializer) throws IOException {
return open(file, charset, CsvOptions.defaultOptions(), deserializer);
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the specified charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param charset the charset of the file.
* @param options reading options.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, Charset charset, CsvOptions options, CsvDeserializer<T> deserializer)
throws IOException {
return open(new InputStreamReader(new FileInputStream(file), charset), options, deserializer);
}
/**
* Return a new {@code CsvReader} using the specified {@link Reader} for reading. Bytes from the file are decoded
* into characters using the reader's charset. Reading commences at the point specified by the reader.
*
* @param reader the {@link Reader} to read from.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
*/
public static <T> CsvReader<T> open(Reader reader, CsvDeserializer<T> deserializer) {
return open(reader, CsvOptions.defaultOptions(), deserializer);
}
/**
* Return a new {@code CsvReader} using the specified {@link Reader} for reading. Bytes from the file are decoded
* into characters using the reader's charset. Reading commences at the point specified by the reader.
*
* @param reader the {@link Reader} to read from.
* @param options reading options.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
*/
public static <T> CsvReader<T> open(Reader reader, CsvOptions options, CsvDeserializer<T> deserializer) {
return new CsvReader<>(reader, options, deserializer);
}
private int read() throws IOException {
if (next >= size) {
if ((size = reader.read(buffer, 0, buffer.length)) < 0) {
return -1;
}
next = 0;
}
return buffer[next++];
}
/**
* Skip the next elements of the reader.
*
* @param n The number of elements to skip.
* @throws CsvException if the csv is not properly formatted.
* @throws IOException if an I/O error occurs.
*/
public void skip(int n) throws CsvException, IOException {
checkClosedState();
if (state == NO_SUCH_ELEMENT) {
return;
}
int toSkip = n;
if (state == ELEMENT_READ) {
state = ELEMENT_NOT_READ;
line.clear();
if (--toSkip == 0) {
return;
}
}
int row = SOL;
int nextChar;
do {
nextChar = read();
int column = columnOf(nextChar);
int transact = TRANSACT[row + column];
if ((transact & MKL) != 0) {
lines++;
if (--toSkip == 0) {
return;
}
row = SOL;
} else {
row = transact & STATE_MASK;
}
} while (row < EOF);
if (row == EOF) {
state = NO_SUCH_ELEMENT;
return;
}
throw new UnexpectedCharacterException(lines, nextChar);
}
}
| src/main/java/me/mneri/csv/CsvReader.java | /*
* Copyright 2018 Massimo Neri <[email protected]>
*
* This file is part of mneri/csv.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.mneri.csv;
import java.io.*;
import java.nio.charset.Charset;
import java.util.NoSuchElementException;
/**
* Read csv streams and automatically transform lines into Java objects.
*
* @param <T> The type of the Java objects to read.
* @author Massimo Neri <<a href="mailto:[email protected]">[email protected]</a>>
*/
public final class CsvReader<T> implements Closeable {
//@formatter:off
private static final int SOL = 0; // Start of line
private static final int SOF = 6; // Start of field
private static final int QOT = 12; // Quotation
private static final int ESC = 18; // Escape
private static final int TXT = 24; // Text
private static final int CAR = 30; // Carriage return
private static final int EOL = 36; // End of line
private static final int EOF = 42; // End of file
private static final int ERR = 48; // Error
private static final int STATE_MASK = 63;
private static final int NOP = 0; // No operation
private static final int APP = 1 << 6; // Append
private static final int MKF = 1 << 7; // Make field
private static final int MKL = 1 << 8; // Make line
private static final int[] TRANSACT = {
// * " , \r \n EOF
TXT | APP , QOT | NOP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | NOP , // SOL
TXT | APP , QOT | NOP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | MKF | MKL, // SOF
QOT | APP , ESC | NOP , QOT | APP , QOT | APP , QOT | APP , ERR | NOP , // QOT
ERR | NOP , QOT | APP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | MKF | MKL, // ESC
TXT | APP , TXT | APP , SOF | MKF , CAR | NOP , EOL | MKF | MKL, EOF | MKF | MKL, // TXT
ERR | NOP , ERR | NOP , ERR | NOP , ERR | NOP , EOL | MKF | MKL, ERR | NOP }; // CAR
//@formatter:on
//@formatter:off
private static final int ELEMENT_NOT_READ = 0;
private static final int ELEMENT_READ = 1;
private static final int NO_SUCH_ELEMENT = 2;
private static final int CLOSED = 3;
//@formatter:on
private static final int DEFAULT_BUFFER_SIZE = 8192;
private char[] buffer;
private final char delimiter;
private CsvDeserializer<T> deserializer;
private RecyclableCsvLine line;
private int lines;
private int next;
private final char quotation;
private Reader reader;
private int size;
private int state = ELEMENT_NOT_READ;
private CsvReader(Reader reader, CsvOptions options, CsvDeserializer<T> deserializer) {
this.reader = reader;
this.deserializer = deserializer;
buffer = new char[DEFAULT_BUFFER_SIZE];
line = new RecyclableCsvLine();
delimiter = options.getDelimiter();
quotation = options.getQuotation();
}
private void checkClosedState() {
if (state == CLOSED) {
throw new IllegalStateException("The reader is closed.");
}
}
/**
* Closes the stream and releases any system resources associated with it. Once the stream has been closed, further
* {@link CsvReader#hasNext()}, {@link CsvReader#next()} and {@link CsvReader#skip(int)} invocations will throw an
* {@link IOException}. Closing a previously closed stream has no effect.
*
* @throws IOException if an I/O error occurs.
*/
@Override
public void close() throws IOException {
if (state == CLOSED) {
return;
}
state = CLOSED;
buffer = null;
deserializer = null;
line = null;
reader.close();
reader = null;
}
private int columnOf(int charCode) {
switch (charCode) {
//@formatter:off
case '\r': return 3;
case '\n': return 4;
case -1 : return 5; // EOF
default : if (charCode == delimiter) return 2;
if (charCode == quotation) return 1;
return 0;
//@formatter:on
}
}
/**
* Returns {@code true} if the reader has more elements. (In other words, returns {@code true} if
* {@link CsvReader#next()} would return an element rather than throwing an exception).
*
* @return {@code true} if the reader has more elements.
* @throws CsvException if the csv is not properly formatted.
* @throws IOException if an I/O error occurs.
*/
public boolean hasNext() throws CsvException, IOException {
//@formatter:off
if (state == ELEMENT_READ) { return true; }
else if (state == NO_SUCH_ELEMENT) { return false; }
//@formatter:on
checkClosedState();
int row = SOL;
int nextChar;
do {
nextChar = read();
int column = columnOf(nextChar);
int transact = TRANSACT[row + column];
if ((transact & APP) != 0) {
line.append(nextChar);
} else if ((transact & MKF) != 0) {
line.markField();
if ((transact & MKL) != 0) {
lines++;
state = ELEMENT_READ;
return true;
}
}
row = transact & STATE_MASK;
} while (row < EOF);
if (row == EOF) {
state = NO_SUCH_ELEMENT;
return false;
}
throw new UnexpectedCharacterException(lines, nextChar);
}
/**
* Return the next element in the reader.
*
* @return The next element.
* @throws CsvException if the csv is not properly formatted.
* @throws IOException if an I/O error occurs.
*/
public T next() throws CsvException, IOException {
if (!hasNext()) {
throw new NoSuchElementException();
}
try {
T element = deserializer.deserialize(line);
state = ELEMENT_NOT_READ;
line.clear();
return element;
} catch (Exception e) {
throw new CsvConversionException(line, e);
}
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the default JVM charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, CsvDeserializer<T> deserializer) throws IOException {
return open(file, CsvOptions.defaultOptions(), deserializer);
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the default JVM charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param options reading options.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, CsvOptions options, CsvDeserializer<T> deserializer)
throws IOException {
return open(file, TextUtil.defaultCharset(), options, deserializer);
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the specified charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param charset the charset of the file.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, Charset charset, CsvDeserializer<T> deserializer) throws IOException {
return open(file, charset, CsvOptions.defaultOptions(), deserializer);
}
/**
* Opens a file for reading, returning a {@code CsvReader}. Bytes from the file are decoded into characters using
* the specified charset. Reading commences at the beginning of the file.
*
* @param file the file to open.
* @param charset the charset of the file.
* @param options reading options.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
* @throws IOException if an I/O error occurs.
*/
public static <T> CsvReader<T> open(File file, Charset charset, CsvOptions options, CsvDeserializer<T> deserializer)
throws IOException {
return open(new InputStreamReader(new FileInputStream(file), charset), options, deserializer);
}
/**
* Return a new {@code CsvReader} using the specified {@link Reader} for reading. Bytes from the file are decoded
* into characters using the reader's charset. Reading commences at the point specified by the reader.
*
* @param reader the {@link Reader} to read from.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
*/
public static <T> CsvReader<T> open(Reader reader, CsvDeserializer<T> deserializer) {
return open(reader, CsvOptions.defaultOptions(), deserializer);
}
/**
* Return a new {@code CsvReader} using the specified {@link Reader} for reading. Bytes from the file are decoded
* into characters using the reader's charset. Reading commences at the point specified by the reader.
*
* @param reader the {@link Reader} to read from.
* @param options reading options.
* @param deserializer the deserializer used to convert csv lines into objects.
* @param <T> the type of the objects to read.
* @return A new {@code CsvReader} to read the specified file.
*/
public static <T> CsvReader<T> open(Reader reader, CsvOptions options, CsvDeserializer<T> deserializer) {
return new CsvReader<>(reader, options, deserializer);
}
private int read() throws IOException {
if (next >= size) {
if ((size = reader.read(buffer, 0, buffer.length)) < 0) {
return -1;
}
next = 0;
}
return buffer[next++];
}
/**
* Skip the next elements of the reader.
*
* @param n The number of elements to skip.
* @throws CsvException if the csv is not properly formatted.
* @throws IOException if an I/O error occurs.
*/
public void skip(int n) throws CsvException, IOException {
checkClosedState();
if (state == NO_SUCH_ELEMENT) {
return;
}
int toSkip = n;
if (state == ELEMENT_READ) {
state = ELEMENT_NOT_READ;
line.clear();
if (--toSkip == 0) {
return;
}
}
int row = SOL;
int nextChar;
do {
nextChar = read();
int column = columnOf(nextChar);
int transact = TRANSACT[row + column];
if ((transact & MKL) != 0) {
lines++;
if (--toSkip == 0) {
return;
}
row = SOL;
} else {
row = transact & STATE_MASK;
}
} while (row < EOF);
if (row == EOF) {
state = NO_SUCH_ELEMENT;
return;
}
throw new UnexpectedCharacterException(lines, nextChar);
}
}
| Performance improvement
| src/main/java/me/mneri/csv/CsvReader.java | Performance improvement |
|
Java | apache-2.0 | 18afecf166e0d06cfe9346e00898583a0f281c25 | 0 | NLeSC/vbrowser,NLeSC/vbrowser,NLeSC/vbrowser,NLeSC/vbrowser | /*
* Copyrighted 2012-2013 Netherlands eScience Center.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* For details, see the LICENCE.txt file location in the root directory of this
* distribution or obtain the Apache License at the following location:
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For the full license, see: LICENCE.txt (located in the root folder of this distribution).
* ---
*/
// source:
package vfs;
import java.net.URI;
import junit.framework.Assert;
import test.TestSettings;
import nl.esciencecenter.octopus.credentials.Credential;
import nl.esciencecenter.octopus.credentials.Credentials;
import nl.esciencecenter.octopus.files.AbsolutePath;
import nl.esciencecenter.octopus.files.FileSystem;
import nl.esciencecenter.octopus.files.RelativePath;
import nl.esciencecenter.vbrowser.vrs.octopus.OctopusFSFactory;
import nl.nlesc.vlet.VletConfig;
import nl.nlesc.vlet.vfs.VFSClient;
import nl.nlesc.vlet.vrl.VRL;
import nl.nlesc.vlet.vrs.VRS;
public class TestVFS_OctopusLocalFS extends TestVFS
{
private static final VFSClient vfs=null;
static
{
try
{
initOctopus();
}
catch (Exception e)
{
e.printStackTrace();
}
}
public static VFSClient initOctopus() throws Exception
{
if (vfs!=null)
return vfs;
VletConfig.init();
VRS.getRegistry().unregisterVRSDriverClass(nl.nlesc.vlet.vdriver.vfs.localfs.LocalFSFactory.class);
VRS.getRegistry().registerVRSDriverClass(OctopusFSFactory.class);
VFSClient vfs=VFSClient.getDefault();
return vfs;
}
@Override
public VRL getRemoteLocation()
{
return TestSettings.getTestLocation(TestSettings.VFS_LOCALFS_LOCATION);
}
}
| nl.nlesc.vlet.vrs.tests/testsrc/vfs/TestVFS_OctopusLocalFS.java | /*
* Copyrighted 2012-2013 Netherlands eScience Center.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* For details, see the LICENCE.txt file location in the root directory of this
* distribution or obtain the Apache License at the following location:
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For the full license, see: LICENCE.txt (located in the root folder of this distribution).
* ---
*/
// source:
package vfs;
import test.TestSettings;
import nl.esciencecenter.vbrowser.vrs.octopus.OctopusFSFactory;
import nl.nlesc.vlet.VletConfig;
import nl.nlesc.vlet.vfs.VFSClient;
import nl.nlesc.vlet.vrl.VRL;
import nl.nlesc.vlet.vrs.VRS;
public class TestVFS_OctopusLocalFS extends TestVFS
{
private static final VFSClient vfs=null;
static
{
try
{
initOctopus();
}
catch (Exception e)
{
e.printStackTrace();
}
}
public static VFSClient initOctopus() throws Exception
{
if (vfs!=null)
return vfs;
VletConfig.init();
VRS.getRegistry().unregisterVRSDriverClass(nl.nlesc.vlet.vdriver.vfs.localfs.LocalFSFactory.class);
VRS.getRegistry().registerVRSDriverClass(OctopusFSFactory.class);
VFSClient vfs=VFSClient.getDefault();
return vfs;
}
@Override
public VRL getRemoteLocation()
{
return TestSettings.getTestLocation(TestSettings.VFS_LOCALFS_LOCATION);
}
}
| Added Sftp Tests for Octopus.
| nl.nlesc.vlet.vrs.tests/testsrc/vfs/TestVFS_OctopusLocalFS.java | Added Sftp Tests for Octopus. |
|
Java | apache-2.0 | 6cbf508f3769c2055fe3f6b2ea358cccff6e02cf | 0 | GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit | // Copyright (C) 2016 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.submit;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.gerrit.server.notedb.ReviewerStateInternal.REVIEWER;
import static com.google.gerrit.server.project.ProjectCache.illegalState;
import static java.util.Comparator.comparing;
import static java.util.Objects.requireNonNull;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.entities.Change;
import com.google.gerrit.entities.ChangeMessage;
import com.google.gerrit.entities.LabelId;
import com.google.gerrit.entities.PatchSet;
import com.google.gerrit.entities.PatchSetApproval;
import com.google.gerrit.entities.Project;
import com.google.gerrit.entities.RefNames;
import com.google.gerrit.entities.SubmitRecord;
import com.google.gerrit.exceptions.StorageException;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.change.LabelNormalizer;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk;
import com.google.gerrit.server.git.GroupCollector;
import com.google.gerrit.server.git.MergeUtil;
import com.google.gerrit.server.notedb.ChangeUpdate;
import com.google.gerrit.server.project.ProjectConfig;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.server.update.BatchUpdateOp;
import com.google.gerrit.server.update.ChangeContext;
import com.google.gerrit.server.update.Context;
import com.google.gerrit.server.update.RepoContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.transport.ReceiveCommand;
abstract class SubmitStrategyOp implements BatchUpdateOp {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
protected final SubmitStrategy.Arguments args;
protected final CodeReviewCommit toMerge;
private ReceiveCommand command;
private PatchSetApproval submitter;
private ObjectId mergeResultRev;
private PatchSet mergedPatchSet;
private Change updatedChange;
private CodeReviewCommit alreadyMergedCommit;
private boolean changeAlreadyMerged;
protected SubmitStrategyOp(SubmitStrategy.Arguments args, CodeReviewCommit toMerge) {
this.args = args;
this.toMerge = toMerge;
}
final Change.Id getId() {
return toMerge.change().getId();
}
final CodeReviewCommit getCommit() {
return toMerge;
}
protected final BranchNameKey getDest() {
return toMerge.change().getDest();
}
protected final Project.NameKey getProject() {
return getDest().project();
}
@Override
public final void updateRepo(RepoContext ctx) throws Exception {
logger.atFine().log(
"%s#updateRepo for change %s", getClass().getSimpleName(), toMerge.change().getId());
checkState(
ctx.getRevWalk() == args.rw,
"SubmitStrategyOp requires callers to call BatchUpdate#setRepository with exactly the same"
+ " CodeReviewRevWalk instance from the SubmitStrategy.Arguments: %s != %s",
ctx.getRevWalk(),
args.rw);
// Run the submit strategy implementation and record the merge tip state so
// we can create the ref update.
CodeReviewCommit tipBefore = args.mergeTip.getCurrentTip();
alreadyMergedCommit = getAlreadyMergedCommit(ctx);
if (alreadyMergedCommit == null) {
updateRepoImpl(ctx);
} else {
logger.atFine().log("Already merged as %s", alreadyMergedCommit.name());
}
CodeReviewCommit tipAfter = args.mergeTip.getCurrentTip();
if (Objects.equals(tipBefore, tipAfter)) {
logger.atFine().log("Did not move tip");
return;
} else if (tipAfter == null) {
logger.atFine().log("No merge tip, no update to perform");
return;
}
logger.atFine().log("Moved tip from %s to %s", tipBefore, tipAfter);
checkProjectConfig(ctx, tipAfter);
// Needed by postUpdate, at which point mergeTip will have advanced further,
// so it's easier to just snapshot the command.
command =
new ReceiveCommand(
firstNonNull(tipBefore, ObjectId.zeroId()), tipAfter, getDest().branch());
ctx.addRefUpdate(command);
args.submoduleCommits.addBranchTip(getDest(), tipAfter);
}
private void checkProjectConfig(RepoContext ctx, CodeReviewCommit commit) {
String refName = getDest().branch();
if (RefNames.REFS_CONFIG.equals(refName)) {
logger.atFine().log("Loading new configuration from %s", RefNames.REFS_CONFIG);
try {
ProjectConfig cfg = args.projectConfigFactory.create(getProject());
cfg.load(ctx.getRevWalk(), commit);
} catch (Exception e) {
throw new StorageException(
"Submit would store invalid"
+ " project configuration "
+ commit.name()
+ " for "
+ getProject(),
e);
}
}
}
private CodeReviewCommit getAlreadyMergedCommit(RepoContext ctx) throws IOException {
CodeReviewCommit tip = args.mergeTip.getInitialTip();
if (tip == null) {
return null;
}
CodeReviewRevWalk rw = (CodeReviewRevWalk) ctx.getRevWalk();
Change.Id id = getId();
String refPrefix = id.toRefPrefix();
Map<String, ObjectId> refs = ctx.getRepoView().getRefs(refPrefix);
List<CodeReviewCommit> commits = new ArrayList<>(refs.size());
for (Map.Entry<String, ObjectId> e : refs.entrySet()) {
PatchSet.Id psId = PatchSet.Id.fromRef(refPrefix + e.getKey());
if (psId == null) {
continue;
}
try {
CodeReviewCommit c = rw.parseCommit(e.getValue());
c.setPatchsetId(psId);
commits.add(c);
} catch (MissingObjectException | IncorrectObjectTypeException ex) {
continue; // Bogus ref, can't be merged into tip so we don't care.
}
}
commits.sort(comparing((CodeReviewCommit c) -> c.getPatchsetId().get()).reversed());
CodeReviewCommit result = MergeUtil.findAnyMergedInto(rw, commits, tip);
if (result == null) {
return null;
}
// Some patch set of this change is actually merged into the target
// branch, most likely because a previous run of MergeOp failed after
// updateRepo, during updateChange.
//
// Do the best we can to clean this up: mark the change as merged and set
// the current patch set. Don't touch the dest branch at all. This can
// lead to some odd situations like another change in the set merging in
// a different patch set of this change, but that's unavoidable at this
// point. At least the change will end up in the right state.
//
// TODO(dborowitz): Consider deleting later junk patch set refs. They
// presumably don't have PatchSets pointing to them.
rw.parseBody(result);
result.add(args.canMergeFlag);
PatchSet.Id psId = result.getPatchsetId();
result.copyFrom(toMerge);
result.setPatchsetId(psId); // Got overwriten by copyFrom.
result.setStatusCode(CommitMergeStatus.ALREADY_MERGED);
args.commitStatus.put(result);
return result;
}
@Override
public final boolean updateChange(ChangeContext ctx) throws Exception {
logger.atFine().log(
"%s#updateChange for change %s", getClass().getSimpleName(), toMerge.change().getId());
toMerge.setNotes(ctx.getNotes()); // Update change and notes from ctx.
if (ctx.getChange().isMerged()) {
// Either another thread won a race, or we are retrying a whole topic submission after one
// repo failed with lock failure.
if (alreadyMergedCommit == null) {
logger.atFine().log(
"Change is already merged according to its status, but we were unable to find it"
+ " merged into the current tip (%s)",
args.mergeTip.getCurrentTip().name());
} else {
logger.atFine().log("Change is already merged");
}
changeAlreadyMerged = true;
return false;
}
if (alreadyMergedCommit != null) {
alreadyMergedCommit.setNotes(ctx.getNotes());
mergedPatchSet = getOrCreateAlreadyMergedPatchSet(ctx);
} else {
PatchSet newPatchSet = updateChangeImpl(ctx);
PatchSet.Id oldPsId = requireNonNull(toMerge.getPatchsetId());
PatchSet.Id newPsId = requireNonNull(ctx.getChange().currentPatchSetId());
if (newPatchSet == null) {
checkState(
oldPsId.equals(newPsId),
"patch set advanced from %s to %s but updateChangeImpl did not"
+ " return new patch set instance",
oldPsId,
newPsId);
// Ok to use stale notes to get the old patch set, which didn't change
// during the submit strategy.
mergedPatchSet =
requireNonNull(
args.psUtil.get(ctx.getNotes(), oldPsId),
() -> String.format("missing old patch set %s", oldPsId));
} else {
PatchSet.Id n = newPatchSet.id();
checkState(
!n.equals(oldPsId) && n.equals(newPsId),
"current patch was %s and is now %s, but updateChangeImpl returned"
+ " new patch set instance at %s",
oldPsId,
newPsId,
n);
mergedPatchSet = newPatchSet;
}
}
Change c = ctx.getChange();
Change.Id id = c.getId();
CodeReviewCommit commit = args.commitStatus.get(id);
requireNonNull(commit, () -> String.format("missing commit for change %s", id));
CommitMergeStatus s = commit.getStatusCode();
requireNonNull(
s,
() -> String.format("status not set for change %s; expected to previously fail fast", id));
logger.atFine().log("Status of change %s (%s) on %s: %s", id, commit.name(), c.getDest(), s);
setApproval(ctx, args.caller);
mergeResultRev =
alreadyMergedCommit == null
? args.mergeTip.getMergeResults().get(commit)
// Our fixup code is not smart enough to find a merge commit
// corresponding to the merge result. This results in a different
// ChangeMergedEvent in the fixup case, but we'll just live with that.
: alreadyMergedCommit;
try {
setMerged(ctx, message(ctx, commit, s));
} catch (StorageException err) {
String msg = "Error updating change status for " + id;
logger.atSevere().withCause(err).log(msg);
args.commitStatus.logProblem(id, msg);
// It's possible this happened before updating anything in the db, but
// it's hard to know for sure, so just return true below to be safe.
}
updatedChange = c;
return true;
}
/**
* Returns the updated change after this op has been executed.
*
* @return the updated change after this op has been executed, {@link Optional#empty()} if the op
* was not executed yet, or if the execution has failed
*/
public Optional<Change> getUpdatedChange() {
return Optional.ofNullable(updatedChange);
}
private PatchSet getOrCreateAlreadyMergedPatchSet(ChangeContext ctx) throws IOException {
PatchSet.Id psId = alreadyMergedCommit.getPatchsetId();
logger.atFine().log("Fixing up already-merged patch set %s", psId);
PatchSet prevPs = args.psUtil.current(ctx.getNotes());
ctx.getRevWalk().parseBody(alreadyMergedCommit);
ctx.getChange()
.setCurrentPatchSet(
psId, alreadyMergedCommit.getShortMessage(), ctx.getChange().getOriginalSubject());
PatchSet existing = args.psUtil.get(ctx.getNotes(), psId);
if (existing != null) {
logger.atFine().log("Patch set row exists, only updating change");
return existing;
}
// No patch set for the already merged commit, although we know it came form
// a patch set ref. Fix up the database. Note that this uses the current
// user as the uploader, which is as good a guess as any.
List<String> groups =
prevPs != null ? prevPs.groups() : GroupCollector.getDefaultGroups(alreadyMergedCommit);
return args.psUtil.insert(
ctx.getRevWalk(), ctx.getUpdate(psId), psId, alreadyMergedCommit, groups, null, null);
}
private void setApproval(ChangeContext ctx, IdentifiedUser user) throws IOException {
Change.Id id = ctx.getChange().getId();
List<SubmitRecord> records = args.commitStatus.getSubmitRecords(id);
PatchSet.Id oldPsId = toMerge.getPatchsetId();
PatchSet.Id newPsId = ctx.getChange().currentPatchSetId();
logger.atFine().log("Add approval for %s", id);
ChangeUpdate origPsUpdate = ctx.getUpdate(oldPsId);
origPsUpdate.putReviewer(user.getAccountId(), REVIEWER);
LabelNormalizer.Result normalized = approve(ctx, origPsUpdate);
ChangeUpdate newPsUpdate = ctx.getUpdate(newPsId);
newPsUpdate.merge(args.submissionId, records);
// If the submit strategy created a new revision (rebase, cherry-pick), copy
// approvals as well.
if (!newPsId.equals(oldPsId)) {
saveApprovals(normalized, newPsUpdate, true);
submitter = submitter.copyWithPatchSet(newPsId);
}
}
private LabelNormalizer.Result approve(ChangeContext ctx, ChangeUpdate update)
throws IOException {
PatchSet.Id psId = update.getPatchSetId();
Map<PatchSetApproval.Key, PatchSetApproval> byKey = new HashMap<>();
for (PatchSetApproval psa :
args.approvalsUtil.byPatchSet(
ctx.getNotes(), psId, ctx.getRevWalk(), ctx.getRepoView().getConfig())) {
byKey.put(psa.key(), psa);
}
submitter =
ApprovalsUtil.newApproval(psId, ctx.getUser(), LabelId.legacySubmit(), 1, ctx.getWhen())
.build();
byKey.put(submitter.key(), submitter);
// Flatten out existing approvals for this patch set based upon the current
// permissions. Once the change is closed the approvals are not updated at
// presentation view time, except for zero votes used to indicate a reviewer
// was added. So we need to make sure votes are accurate now. This way if
// permissions get modified in the future, historical records stay accurate.
LabelNormalizer.Result normalized =
args.labelNormalizer.normalize(ctx.getNotes(), byKey.values());
update.putApproval(submitter.label(), submitter.value());
saveApprovals(normalized, update, false);
return normalized;
}
private void saveApprovals(
LabelNormalizer.Result normalized, ChangeUpdate update, boolean includeUnchanged) {
for (PatchSetApproval psa : normalized.updated()) {
update.putApprovalFor(psa.accountId(), psa.label(), psa.value());
}
for (PatchSetApproval psa : normalized.deleted()) {
update.removeApprovalFor(psa.accountId(), psa.label());
}
// TODO(dborowitz): Don't use a label in NoteDb; just check when status
// change happened.
for (PatchSetApproval psa : normalized.unchanged()) {
if (includeUnchanged || psa.isLegacySubmit()) {
logger.atFine().log("Adding submit label %s", psa);
update.putApprovalFor(psa.accountId(), psa.label(), psa.value());
}
}
}
private ChangeMessage message(ChangeContext ctx, CodeReviewCommit commit, CommitMergeStatus s) {
requireNonNull(s, "CommitMergeStatus may not be null");
String txt = s.getDescription();
if (s == CommitMergeStatus.CLEAN_MERGE) {
return message(ctx, commit.getPatchsetId(), txt);
} else if (s == CommitMergeStatus.CLEAN_REBASE || s == CommitMergeStatus.CLEAN_PICK) {
return message(ctx, commit.getPatchsetId(), txt + " as " + commit.name());
} else if (s == CommitMergeStatus.SKIPPED_IDENTICAL_TREE) {
return message(ctx, commit.getPatchsetId(), txt);
} else if (s == CommitMergeStatus.ALREADY_MERGED) {
// Best effort to mimic the message that would have happened had this
// succeeded the first time around.
switch (args.submitType) {
case FAST_FORWARD_ONLY:
case MERGE_ALWAYS:
case MERGE_IF_NECESSARY:
return message(ctx, commit, CommitMergeStatus.CLEAN_MERGE);
case CHERRY_PICK:
return message(ctx, commit, CommitMergeStatus.CLEAN_PICK);
case REBASE_IF_NECESSARY:
case REBASE_ALWAYS:
return message(ctx, commit, CommitMergeStatus.CLEAN_REBASE);
case INHERIT:
default:
throw new IllegalStateException(
"unexpected submit type "
+ args.submitType.toString()
+ " for change "
+ commit.change().getId());
}
} else {
throw new IllegalStateException(
"unexpected status "
+ s
+ " for change "
+ commit.change().getId()
+ "; expected to previously fail fast");
}
}
private ChangeMessage message(ChangeContext ctx, PatchSet.Id psId, String body) {
return ChangeMessagesUtil.newMessage(
psId, ctx.getUser(), ctx.getWhen(), body, ChangeMessagesUtil.TAG_MERGED);
}
private void setMerged(ChangeContext ctx, ChangeMessage msg) {
Change c = ctx.getChange();
logger.atFine().log("Setting change %s merged", c.getId());
c.setStatus(Change.Status.MERGED);
c.setSubmissionId(args.submissionId.toString());
// TODO(dborowitz): We need to be able to change the author of the message,
// which is not the user from the update context. addMergedMessage was able
// to do this in the past.
if (msg != null) {
args.cmUtil.addChangeMessage(ctx.getUpdate(msg.getPatchSetId()), msg);
}
}
@Override
public final void postUpdate(Context ctx) throws Exception {
if (changeAlreadyMerged) {
// TODO(dborowitz): This is suboptimal behavior in the presence of retries: postUpdate steps
// will never get run for changes that submitted successfully on any but the final attempt.
// This is primarily a temporary workaround for the fact that the submitter field is not
// populated in the changeAlreadyMerged case.
//
// If we naively execute postUpdate even if the change is already merged when updateChange
// being, then we are subject to a race where postUpdate steps are run twice if two submit
// processes run at the same time.
logger.atFine().log(
"Skipping post-update steps for change %s; submitter is %s", getId(), submitter);
return;
}
logger.atFine().log(
"Begin post-update steps for change %s; submitter is %s", getId(), submitter);
postUpdateImpl(ctx);
if (command != null) {
args.tagCache.updateFastForward(
getProject(), command.getRefName(), command.getOldId(), command.getNewId());
// TODO(dborowitz): Move to BatchUpdate? Would also allow us to run once
// per project even if multiple changes to refs/meta/config are submitted.
if (RefNames.REFS_CONFIG.equals(getDest().branch())) {
args.projectCache.evict(getProject());
ProjectState p =
args.projectCache.get(getProject()).orElseThrow(illegalState(getProject()));
try (Repository git = args.repoManager.openRepository(getProject())) {
git.setGitwebDescription(p.getProject().getDescription());
} catch (IOException e) {
logger.atSevere().withCause(e).log("cannot update description of %s", p.getName());
}
}
}
logger.atFine().log(
"Begin sending emails for submitting change %s; submitter is %s", getId(), submitter);
// Assume the change must have been merged at this point, otherwise we would
// have failed fast in one of the other steps.
try {
args.mergedSenderFactory
.create(
ctx.getProject(),
toMerge.change(),
submitter.accountId(),
ctx.getNotify(getId()),
ctx.getRepoView())
.sendAsync();
} catch (Exception e) {
logger.atSevere().withCause(e).log("Cannot email merged notification for %s", getId());
}
if (mergeResultRev != null && !args.dryrun) {
args.changeMerged.fire(
updatedChange,
mergedPatchSet,
args.accountCache.get(submitter.accountId()).orElse(null),
args.mergeTip.getCurrentTip().name(),
ctx.getWhen());
}
}
/**
* @see #updateRepo(RepoContext)
* @param ctx
*/
protected void updateRepoImpl(RepoContext ctx) throws Exception {}
/**
* @see #updateChange(ChangeContext)
* @param ctx
* @return a new patch set if one was created by the submit strategy, or null if not.
*/
protected PatchSet updateChangeImpl(ChangeContext ctx) throws Exception {
return null;
}
/**
* @see #postUpdate(Context)
* @param ctx
*/
protected void postUpdateImpl(Context ctx) throws Exception {}
/**
* Amend the commit with gitlink update
*
* @param commit
*/
protected CodeReviewCommit amendGitlink(CodeReviewCommit commit)
throws IntegrationConflictException {
if (!args.subscriptionGraph.hasSubscription(args.destBranch)) {
return commit;
}
// Modify the commit with gitlink update
try {
return args.submoduleCommits.amendGitlinksCommit(
args.destBranch, commit, args.subscriptionGraph.getSubscriptions(args.destBranch));
} catch (IOException e) {
throw new StorageException(
String.format("cannot update gitlink for the commit at branch %s", args.destBranch), e);
} catch (SubmoduleConflictException e) {
throw new IntegrationConflictException(
String.format(
"cannot update gitlink for the commit at branch %s: %s",
args.destBranch, e.getMessage()),
e);
}
}
}
| java/com/google/gerrit/server/submit/SubmitStrategyOp.java | // Copyright (C) 2016 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.submit;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.gerrit.server.notedb.ReviewerStateInternal.REVIEWER;
import static com.google.gerrit.server.project.ProjectCache.illegalState;
import static java.util.Comparator.comparing;
import static java.util.Objects.requireNonNull;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.entities.Change;
import com.google.gerrit.entities.ChangeMessage;
import com.google.gerrit.entities.LabelId;
import com.google.gerrit.entities.PatchSet;
import com.google.gerrit.entities.PatchSetApproval;
import com.google.gerrit.entities.Project;
import com.google.gerrit.entities.RefNames;
import com.google.gerrit.entities.SubmitRecord;
import com.google.gerrit.exceptions.StorageException;
import com.google.gerrit.server.ApprovalsUtil;
import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.change.LabelNormalizer;
import com.google.gerrit.server.git.CodeReviewCommit;
import com.google.gerrit.server.git.CodeReviewCommit.CodeReviewRevWalk;
import com.google.gerrit.server.git.GroupCollector;
import com.google.gerrit.server.git.MergeUtil;
import com.google.gerrit.server.notedb.ChangeUpdate;
import com.google.gerrit.server.project.ProjectConfig;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.server.update.BatchUpdateOp;
import com.google.gerrit.server.update.ChangeContext;
import com.google.gerrit.server.update.Context;
import com.google.gerrit.server.update.RepoContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.transport.ReceiveCommand;
abstract class SubmitStrategyOp implements BatchUpdateOp {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
protected final SubmitStrategy.Arguments args;
protected final CodeReviewCommit toMerge;
private ReceiveCommand command;
private PatchSetApproval submitter;
private ObjectId mergeResultRev;
private PatchSet mergedPatchSet;
private Change updatedChange;
private CodeReviewCommit alreadyMergedCommit;
private boolean changeAlreadyMerged;
protected SubmitStrategyOp(SubmitStrategy.Arguments args, CodeReviewCommit toMerge) {
this.args = args;
this.toMerge = toMerge;
}
final Change.Id getId() {
return toMerge.change().getId();
}
final CodeReviewCommit getCommit() {
return toMerge;
}
protected final BranchNameKey getDest() {
return toMerge.change().getDest();
}
protected final Project.NameKey getProject() {
return getDest().project();
}
@Override
public final void updateRepo(RepoContext ctx) throws Exception {
logger.atFine().log(
"%s#updateRepo for change %s", getClass().getSimpleName(), toMerge.change().getId());
checkState(
ctx.getRevWalk() == args.rw,
"SubmitStrategyOp requires callers to call BatchUpdate#setRepository with exactly the same"
+ " CodeReviewRevWalk instance from the SubmitStrategy.Arguments: %s != %s",
ctx.getRevWalk(),
args.rw);
// Run the submit strategy implementation and record the merge tip state so
// we can create the ref update.
CodeReviewCommit tipBefore = args.mergeTip.getCurrentTip();
alreadyMergedCommit = getAlreadyMergedCommit(ctx);
if (alreadyMergedCommit == null) {
updateRepoImpl(ctx);
} else {
logger.atFine().log("Already merged as %s", alreadyMergedCommit.name());
}
CodeReviewCommit tipAfter = args.mergeTip.getCurrentTip();
if (Objects.equals(tipBefore, tipAfter)) {
logger.atFine().log("Did not move tip");
return;
} else if (tipAfter == null) {
logger.atFine().log("No merge tip, no update to perform");
return;
}
logger.atFine().log("Moved tip from %s to %s", tipBefore, tipAfter);
checkProjectConfig(ctx, tipAfter);
// Needed by postUpdate, at which point mergeTip will have advanced further,
// so it's easier to just snapshot the command.
command =
new ReceiveCommand(
firstNonNull(tipBefore, ObjectId.zeroId()), tipAfter, getDest().branch());
ctx.addRefUpdate(command);
args.submoduleCommits.addBranchTip(getDest(), tipAfter);
}
private void checkProjectConfig(RepoContext ctx, CodeReviewCommit commit) {
String refName = getDest().branch();
if (RefNames.REFS_CONFIG.equals(refName)) {
logger.atFine().log("Loading new configuration from %s", RefNames.REFS_CONFIG);
try {
ProjectConfig cfg = args.projectConfigFactory.create(getProject());
cfg.load(ctx.getRevWalk(), commit);
} catch (Exception e) {
throw new StorageException(
"Submit would store invalid"
+ " project configuration "
+ commit.name()
+ " for "
+ getProject(),
e);
}
}
}
private CodeReviewCommit getAlreadyMergedCommit(RepoContext ctx) throws IOException {
CodeReviewCommit tip = args.mergeTip.getInitialTip();
if (tip == null) {
return null;
}
CodeReviewRevWalk rw = (CodeReviewRevWalk) ctx.getRevWalk();
Change.Id id = getId();
String refPrefix = id.toRefPrefix();
Map<String, ObjectId> refs = ctx.getRepoView().getRefs(refPrefix);
List<CodeReviewCommit> commits = new ArrayList<>(refs.size());
for (Map.Entry<String, ObjectId> e : refs.entrySet()) {
PatchSet.Id psId = PatchSet.Id.fromRef(refPrefix + e.getKey());
if (psId == null) {
continue;
}
try {
CodeReviewCommit c = rw.parseCommit(e.getValue());
c.setPatchsetId(psId);
commits.add(c);
} catch (MissingObjectException | IncorrectObjectTypeException ex) {
continue; // Bogus ref, can't be merged into tip so we don't care.
}
}
commits.sort(comparing((CodeReviewCommit c) -> c.getPatchsetId().get()).reversed());
CodeReviewCommit result = MergeUtil.findAnyMergedInto(rw, commits, tip);
if (result == null) {
return null;
}
// Some patch set of this change is actually merged into the target
// branch, most likely because a previous run of MergeOp failed after
// updateRepo, during updateChange.
//
// Do the best we can to clean this up: mark the change as merged and set
// the current patch set. Don't touch the dest branch at all. This can
// lead to some odd situations like another change in the set merging in
// a different patch set of this change, but that's unavoidable at this
// point. At least the change will end up in the right state.
//
// TODO(dborowitz): Consider deleting later junk patch set refs. They
// presumably don't have PatchSets pointing to them.
rw.parseBody(result);
result.add(args.canMergeFlag);
PatchSet.Id psId = result.getPatchsetId();
result.copyFrom(toMerge);
result.setPatchsetId(psId); // Got overwriten by copyFrom.
result.setStatusCode(CommitMergeStatus.ALREADY_MERGED);
args.commitStatus.put(result);
return result;
}
@Override
public final boolean updateChange(ChangeContext ctx) throws Exception {
logger.atFine().log(
"%s#updateChange for change %s", getClass().getSimpleName(), toMerge.change().getId());
toMerge.setNotes(ctx.getNotes()); // Update change and notes from ctx.
if (ctx.getChange().isMerged()) {
// Either another thread won a race, or we are retrying a whole topic submission after one
// repo failed with lock failure.
if (alreadyMergedCommit == null) {
logger.atFine().log(
"Change is already merged according to its status, but we were unable to find it"
+ " merged into the current tip (%s)",
args.mergeTip.getCurrentTip().name());
} else {
logger.atFine().log("Change is already merged");
}
changeAlreadyMerged = true;
return false;
}
if (alreadyMergedCommit != null) {
alreadyMergedCommit.setNotes(ctx.getNotes());
mergedPatchSet = getOrCreateAlreadyMergedPatchSet(ctx);
} else {
PatchSet newPatchSet = updateChangeImpl(ctx);
PatchSet.Id oldPsId = requireNonNull(toMerge.getPatchsetId());
PatchSet.Id newPsId = requireNonNull(ctx.getChange().currentPatchSetId());
if (newPatchSet == null) {
checkState(
oldPsId.equals(newPsId),
"patch set advanced from %s to %s but updateChangeImpl did not"
+ " return new patch set instance",
oldPsId,
newPsId);
// Ok to use stale notes to get the old patch set, which didn't change
// during the submit strategy.
mergedPatchSet =
requireNonNull(
args.psUtil.get(ctx.getNotes(), oldPsId),
() -> String.format("missing old patch set %s", oldPsId));
} else {
PatchSet.Id n = newPatchSet.id();
checkState(
!n.equals(oldPsId) && n.equals(newPsId),
"current patch was %s and is now %s, but updateChangeImpl returned"
+ " new patch set instance at %s",
oldPsId,
newPsId,
n);
mergedPatchSet = newPatchSet;
}
}
Change c = ctx.getChange();
Change.Id id = c.getId();
CodeReviewCommit commit = args.commitStatus.get(id);
requireNonNull(commit, () -> String.format("missing commit for change %s", id));
CommitMergeStatus s = commit.getStatusCode();
requireNonNull(
s,
() -> String.format("status not set for change %s; expected to previously fail fast", id));
logger.atFine().log("Status of change %s (%s) on %s: %s", id, commit.name(), c.getDest(), s);
setApproval(ctx, args.caller);
mergeResultRev =
alreadyMergedCommit == null
? args.mergeTip.getMergeResults().get(commit)
// Our fixup code is not smart enough to find a merge commit
// corresponding to the merge result. This results in a different
// ChangeMergedEvent in the fixup case, but we'll just live with that.
: alreadyMergedCommit;
try {
setMerged(ctx, message(ctx, commit, s));
} catch (StorageException err) {
String msg = "Error updating change status for " + id;
logger.atSevere().withCause(err).log(msg);
args.commitStatus.logProblem(id, msg);
// It's possible this happened before updating anything in the db, but
// it's hard to know for sure, so just return true below to be safe.
}
updatedChange = c;
return true;
}
/**
* Returns the updated change after this op has been executed.
*
* @return the updated change after this op has been executed, {@link Optional#empty()} if the op
* was not executed yet, or if the execution has failed
*/
public Optional<Change> getUpdatedChange() {
return Optional.ofNullable(updatedChange);
}
private PatchSet getOrCreateAlreadyMergedPatchSet(ChangeContext ctx) throws IOException {
PatchSet.Id psId = alreadyMergedCommit.getPatchsetId();
logger.atFine().log("Fixing up already-merged patch set %s", psId);
PatchSet prevPs = args.psUtil.current(ctx.getNotes());
ctx.getRevWalk().parseBody(alreadyMergedCommit);
ctx.getChange()
.setCurrentPatchSet(
psId, alreadyMergedCommit.getShortMessage(), ctx.getChange().getOriginalSubject());
PatchSet existing = args.psUtil.get(ctx.getNotes(), psId);
if (existing != null) {
logger.atFine().log("Patch set row exists, only updating change");
return existing;
}
// No patch set for the already merged commit, although we know it came form
// a patch set ref. Fix up the database. Note that this uses the current
// user as the uploader, which is as good a guess as any.
List<String> groups =
prevPs != null ? prevPs.groups() : GroupCollector.getDefaultGroups(alreadyMergedCommit);
return args.psUtil.insert(
ctx.getRevWalk(), ctx.getUpdate(psId), psId, alreadyMergedCommit, groups, null, null);
}
private void setApproval(ChangeContext ctx, IdentifiedUser user) throws IOException {
Change.Id id = ctx.getChange().getId();
List<SubmitRecord> records = args.commitStatus.getSubmitRecords(id);
PatchSet.Id oldPsId = toMerge.getPatchsetId();
PatchSet.Id newPsId = ctx.getChange().currentPatchSetId();
logger.atFine().log("Add approval for %s", id);
ChangeUpdate origPsUpdate = ctx.getUpdate(oldPsId);
origPsUpdate.putReviewer(user.getAccountId(), REVIEWER);
LabelNormalizer.Result normalized = approve(ctx, origPsUpdate);
ChangeUpdate newPsUpdate = ctx.getUpdate(newPsId);
newPsUpdate.merge(args.submissionId, records);
// If the submit strategy created a new revision (rebase, cherry-pick), copy
// approvals as well.
if (!newPsId.equals(oldPsId)) {
saveApprovals(normalized, newPsUpdate, true);
submitter = submitter.copyWithPatchSet(newPsId);
}
}
private LabelNormalizer.Result approve(ChangeContext ctx, ChangeUpdate update)
throws IOException {
PatchSet.Id psId = update.getPatchSetId();
Map<PatchSetApproval.Key, PatchSetApproval> byKey = new HashMap<>();
for (PatchSetApproval psa :
args.approvalsUtil.byPatchSet(
ctx.getNotes(), psId, ctx.getRevWalk(), ctx.getRepoView().getConfig())) {
byKey.put(psa.key(), psa);
}
submitter =
ApprovalsUtil.newApproval(psId, ctx.getUser(), LabelId.legacySubmit(), 1, ctx.getWhen())
.build();
byKey.put(submitter.key(), submitter);
// Flatten out existing approvals for this patch set based upon the current
// permissions. Once the change is closed the approvals are not updated at
// presentation view time, except for zero votes used to indicate a reviewer
// was added. So we need to make sure votes are accurate now. This way if
// permissions get modified in the future, historical records stay accurate.
LabelNormalizer.Result normalized =
args.labelNormalizer.normalize(ctx.getNotes(), byKey.values());
update.putApproval(submitter.label(), submitter.value());
saveApprovals(normalized, update, false);
return normalized;
}
private void saveApprovals(
LabelNormalizer.Result normalized, ChangeUpdate update, boolean includeUnchanged) {
for (PatchSetApproval psa : normalized.updated()) {
update.putApprovalFor(psa.accountId(), psa.label(), psa.value());
}
for (PatchSetApproval psa : normalized.deleted()) {
update.removeApprovalFor(psa.accountId(), psa.label());
}
// TODO(dborowitz): Don't use a label in NoteDb; just check when status
// change happened.
for (PatchSetApproval psa : normalized.unchanged()) {
if (includeUnchanged || psa.isLegacySubmit()) {
logger.atFine().log("Adding submit label %s", psa);
update.putApprovalFor(psa.accountId(), psa.label(), psa.value());
}
}
}
private ChangeMessage message(ChangeContext ctx, CodeReviewCommit commit, CommitMergeStatus s) {
requireNonNull(s, "CommitMergeStatus may not be null");
String txt = s.getDescription();
if (s == CommitMergeStatus.CLEAN_MERGE) {
return message(ctx, commit.getPatchsetId(), txt);
} else if (s == CommitMergeStatus.CLEAN_REBASE || s == CommitMergeStatus.CLEAN_PICK) {
return message(ctx, commit.getPatchsetId(), txt + " as " + commit.name());
} else if (s == CommitMergeStatus.SKIPPED_IDENTICAL_TREE) {
return message(ctx, commit.getPatchsetId(), txt);
} else if (s == CommitMergeStatus.ALREADY_MERGED) {
// Best effort to mimic the message that would have happened had this
// succeeded the first time around.
switch (args.submitType) {
case FAST_FORWARD_ONLY:
case MERGE_ALWAYS:
case MERGE_IF_NECESSARY:
return message(ctx, commit, CommitMergeStatus.CLEAN_MERGE);
case CHERRY_PICK:
return message(ctx, commit, CommitMergeStatus.CLEAN_PICK);
case REBASE_IF_NECESSARY:
case REBASE_ALWAYS:
return message(ctx, commit, CommitMergeStatus.CLEAN_REBASE);
case INHERIT:
default:
throw new IllegalStateException(
"unexpected submit type "
+ args.submitType.toString()
+ " for change "
+ commit.change().getId());
}
} else {
throw new IllegalStateException(
"unexpected status "
+ s
+ " for change "
+ commit.change().getId()
+ "; expected to previously fail fast");
}
}
private ChangeMessage message(ChangeContext ctx, PatchSet.Id psId, String body) {
return ChangeMessagesUtil.newMessage(
psId, ctx.getUser(), ctx.getWhen(), body, ChangeMessagesUtil.TAG_MERGED);
}
private void setMerged(ChangeContext ctx, ChangeMessage msg) {
Change c = ctx.getChange();
logger.atFine().log("Setting change %s merged", c.getId());
c.setStatus(Change.Status.MERGED);
c.setSubmissionId(args.submissionId.toString());
// TODO(dborowitz): We need to be able to change the author of the message,
// which is not the user from the update context. addMergedMessage was able
// to do this in the past.
if (msg != null) {
args.cmUtil.addChangeMessage(ctx.getUpdate(msg.getPatchSetId()), msg);
}
}
@Override
public final void postUpdate(Context ctx) throws Exception {
if (changeAlreadyMerged) {
// TODO(dborowitz): This is suboptimal behavior in the presence of retries: postUpdate steps
// will never get run for changes that submitted successfully on any but the final attempt.
// This is primarily a temporary workaround for the fact that the submitter field is not
// populated in the changeAlreadyMerged case.
//
// If we naively execute postUpdate even if the change is already merged when updateChange
// being, then we are subject to a race where postUpdate steps are run twice if two submit
// processes run at the same time.
logger.atFine().log("Skipping post-update steps for change %s", getId());
return;
}
postUpdateImpl(ctx);
if (command != null) {
args.tagCache.updateFastForward(
getProject(), command.getRefName(), command.getOldId(), command.getNewId());
// TODO(dborowitz): Move to BatchUpdate? Would also allow us to run once
// per project even if multiple changes to refs/meta/config are submitted.
if (RefNames.REFS_CONFIG.equals(getDest().branch())) {
args.projectCache.evict(getProject());
ProjectState p =
args.projectCache.get(getProject()).orElseThrow(illegalState(getProject()));
try (Repository git = args.repoManager.openRepository(getProject())) {
git.setGitwebDescription(p.getProject().getDescription());
} catch (IOException e) {
logger.atSevere().withCause(e).log("cannot update description of %s", p.getName());
}
}
}
// Assume the change must have been merged at this point, otherwise we would
// have failed fast in one of the other steps.
try {
args.mergedSenderFactory
.create(
ctx.getProject(),
toMerge.change(),
submitter.accountId(),
ctx.getNotify(getId()),
ctx.getRepoView())
.sendAsync();
} catch (Exception e) {
logger.atSevere().withCause(e).log("Cannot email merged notification for %s", getId());
}
if (mergeResultRev != null && !args.dryrun) {
args.changeMerged.fire(
updatedChange,
mergedPatchSet,
args.accountCache.get(submitter.accountId()).orElse(null),
args.mergeTip.getCurrentTip().name(),
ctx.getWhen());
}
}
/**
* @see #updateRepo(RepoContext)
* @param ctx
*/
protected void updateRepoImpl(RepoContext ctx) throws Exception {}
/**
* @see #updateChange(ChangeContext)
* @param ctx
* @return a new patch set if one was created by the submit strategy, or null if not.
*/
protected PatchSet updateChangeImpl(ChangeContext ctx) throws Exception {
return null;
}
/**
* @see #postUpdate(Context)
* @param ctx
*/
protected void postUpdateImpl(Context ctx) throws Exception {}
/**
* Amend the commit with gitlink update
*
* @param commit
*/
protected CodeReviewCommit amendGitlink(CodeReviewCommit commit)
throws IntegrationConflictException {
if (!args.subscriptionGraph.hasSubscription(args.destBranch)) {
return commit;
}
// Modify the commit with gitlink update
try {
return args.submoduleCommits.amendGitlinksCommit(
args.destBranch, commit, args.subscriptionGraph.getSubscriptions(args.destBranch));
} catch (IOException e) {
throw new StorageException(
String.format("cannot update gitlink for the commit at branch %s", args.destBranch), e);
} catch (SubmoduleConflictException e) {
throw new IntegrationConflictException(
String.format(
"cannot update gitlink for the commit at branch %s: %s",
args.destBranch, e.getMessage()),
e);
}
}
}
| Add more fine logging to SubmitStrategyOp
These additional logging allow us to track how many times postUpdate was
called for each change. Also, it allows tracking the "submitter" field
which was occasionally null.
Change-Id: I3e619576657e1b3e57f5e767d36f5e926d4e0c36
| java/com/google/gerrit/server/submit/SubmitStrategyOp.java | Add more fine logging to SubmitStrategyOp |
|
Java | apache-2.0 | 3a3abeaf99d1ac755faaacf04507c9b6bccfe4d7 | 0 | greensnow25/javaaz,greensnow25/javaaz,greensnow25/javaaz | package array;
/**
* public class As fsotiation arrays.
* @author greensnow25.
* @since 2.01.17.
* @version 1.0.
*/
public class As {
/**
* final method unites two arrays.
* @param one array.
* @param two array.
* @return unit array.
*/
public int[] sort(int[] one, int[] two) {
int[] result = new int[one.length + two.length];
int index = 0;
int j = 0;
int i = 0;
while (j != two.length && i != one.length) {
if (one[i] <= two[j]) {
result[index++] = one[i++];
} else if (two[j] <= one[i]) {
result[index++] = two[j++];
}
}
if (i < one.length) {
System.arraycopy(one,i,result,index,one.length-i);
}
else if (j < two.length) {
System.arraycopy(two,j,result,index,two.length-j);
}
return result;
}
}
| chapter1/Array/src/main/java/array/As.java | package array;
/**
* public class As fsotiation arrays.
* @author greensnow25.
* @since 2.01.17.
* @version 1.0.
*/
public class As {
/**
* final method unites two arrays.
* @param one array.
* @param two array.
* @return unit array.
*/
public int[] sort(int[] one, int[] two) {
int[] result = new int[one.length + two.length];
int index = 0;
int j = 0;
int i = 0;
while (j != two.length && i != one.length) {
if (one[i] <= two[j]) {
result[index++] = one[i++];
} else if (two[j] <= one[i]) {
result[index++] = two[j++];
}
}
if (i < one.length) {
System.arraycopy(one,i+1,result,index,one.length-i-1);
}
else if (j < two.length) {
System.arraycopy(two,j+1,result,index+1,two.length-j);
}
return result;
}
}
| Association of ordered arrays
| chapter1/Array/src/main/java/array/As.java | Association of ordered arrays |
|
Java | apache-2.0 | f3b2c702238f5592a6743adbe5aee42bbec9f4e6 | 0 | jcamachor/hive,alanfgates/hive,vergilchiu/hive,b-slim/hive,vineetgarg02/hive,jcamachor/hive,jcamachor/hive,alanfgates/hive,jcamachor/hive,lirui-apache/hive,sankarh/hive,vineetgarg02/hive,vergilchiu/hive,b-slim/hive,lirui-apache/hive,vineetgarg02/hive,vergilchiu/hive,b-slim/hive,vergilchiu/hive,nishantmonu51/hive,alanfgates/hive,b-slim/hive,jcamachor/hive,anishek/hive,lirui-apache/hive,lirui-apache/hive,jcamachor/hive,alanfgates/hive,vergilchiu/hive,sankarh/hive,anishek/hive,anishek/hive,lirui-apache/hive,jcamachor/hive,anishek/hive,lirui-apache/hive,sankarh/hive,sankarh/hive,b-slim/hive,anishek/hive,sankarh/hive,sankarh/hive,vineetgarg02/hive,vergilchiu/hive,jcamachor/hive,nishantmonu51/hive,vineetgarg02/hive,nishantmonu51/hive,alanfgates/hive,vergilchiu/hive,alanfgates/hive,nishantmonu51/hive,vineetgarg02/hive,alanfgates/hive,lirui-apache/hive,nishantmonu51/hive,vineetgarg02/hive,b-slim/hive,b-slim/hive,b-slim/hive,vineetgarg02/hive,anishek/hive,alanfgates/hive,anishek/hive,vergilchiu/hive,vineetgarg02/hive,jcamachor/hive,sankarh/hive,anishek/hive,sankarh/hive,sankarh/hive,anishek/hive,nishantmonu51/hive,nishantmonu51/hive,lirui-apache/hive,lirui-apache/hive,b-slim/hive,nishantmonu51/hive,vergilchiu/hive,nishantmonu51/hive,alanfgates/hive | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
import java.io.IOException;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import net.hydromatic.optiq.SchemaPlus;
import net.hydromatic.optiq.tools.Frameworks;
import org.antlr.runtime.tree.Tree;
import org.antlr.runtime.tree.TreeWizard;
import org.antlr.runtime.tree.TreeWizard.ContextVisitor;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.common.StatsSetupConst.StatDB;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.RecordReader;
import org.apache.hadoop.hive.ql.exec.RecordWriter;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.optimizer.Optimizer;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
import org.apache.hadoop.hive.ql.optimizer.optiq.HiveDefaultRelMetadataProvider;
import org.apache.hadoop.hive.ql.optimizer.optiq.Pair;
import org.apache.hadoop.hive.ql.optimizer.optiq.RelOptHiveTable;
import org.apache.hadoop.hive.ql.optimizer.optiq.cost.HiveVolcanoPlanner;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveAggregateRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveFilterRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveJoinRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveProjectRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveSortRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveTableScanRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HiveMergeProjectRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HivePullUpProjectsAboveJoinRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HivePushJoinThroughJoinRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HiveSwapJoinRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.ASTConverter;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.RexNodeConverter;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.SqlFunctionConverter;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.TypeConverter;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec.SpecType;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderExpression;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFInputSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputType;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionExpression;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionedTableFunctionSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec;
import org.apache.hadoop.hive.ql.parse.QBSubQuery.SubQueryType;
import org.apache.hadoop.hive.ql.parse.SubQueryUtils.ISubQueryJoinInfo;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.CurrentRowSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.RangeBoundarySpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.ValueBoundarySpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFrameSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnListDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
import org.apache.hadoop.hive.ql.plan.ExtractDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
import org.apache.hadoop.hive.ql.plan.ForwardDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.LateralViewForwardDesc;
import org.apache.hadoop.hive.ql.plan.LateralViewJoinDesc;
import org.apache.hadoop.hive.ql.plan.LimitDesc;
import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.ql.plan.UDTFDesc;
import org.apache.hadoop.hive.ql.plan.UnionDesc;
import org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
import org.apache.hadoop.hive.ql.stats.StatsFactory;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
import org.apache.hadoop.hive.serde2.NullStructSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.InputFormat;
import org.eigenbase.rel.AggregateCall;
import org.eigenbase.rel.Aggregation;
import org.eigenbase.rel.InvalidRelException;
import org.eigenbase.rel.JoinRelType;
import org.eigenbase.rel.RelCollation;
import org.eigenbase.rel.RelCollationImpl;
import org.eigenbase.rel.RelFieldCollation;
import org.eigenbase.rel.RelNode;
import org.eigenbase.rel.metadata.CachingRelMetadataProvider;
import org.eigenbase.rel.metadata.ChainedRelMetadataProvider;
import org.eigenbase.rel.metadata.RelMetadataProvider;
import org.eigenbase.relopt.RelOptCluster;
import org.eigenbase.relopt.RelOptPlanner;
import org.eigenbase.relopt.RelOptQuery;
import org.eigenbase.relopt.RelOptSchema;
import org.eigenbase.relopt.RelTraitSet;
import org.eigenbase.reltype.RelDataType;
import org.eigenbase.reltype.RelDataTypeField;
import org.eigenbase.rex.RexBuilder;
import org.eigenbase.rex.RexInputRef;
import org.eigenbase.rex.RexNode;
import org.eigenbase.sql.fun.SqlStdOperatorTable;
import org.eigenbase.util.CompositeList;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
/**
* Implementation of the semantic analyzer. It generates the query plan.
* There are other specific semantic analyzers for some hive operations such as
* DDLSemanticAnalyzer for ddl operations.
*/
public class SemanticAnalyzer extends BaseSemanticAnalyzer {
  // Placeholder database/table names used for queries with no real FROM
  // source (e.g. "SELECT 1"): a dummy single-row table is scanned instead.
  public static final String DUMMY_DATABASE = "_dummy_database";
  public static final String DUMMY_TABLE = "_dummy_table";
  // Partition-pruning predicate per table scan.
  private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
  // Result of partition pruning per table scan.
  private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
  // Top-level (root) operators of the plan, keyed by alias id.
  private HashMap<String, Operator<? extends OperatorDesc>> topOps;
  // Top-level select operators, keyed by alias id.
  private HashMap<String, Operator<? extends OperatorDesc>> topSelOps;
  // Parse-time context (row resolver etc.) for every operator created.
  private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx;
  // Work descriptors for loading data into tables / files at the end of the query.
  private List<LoadTableDesc> loadTableWork;
  private List<LoadFileDesc> loadFileWork;
  // Join operator -> join tree used during join planning.
  private Map<JoinOperator, QBJoinTree> joinContext;
  private Map<SMBMapJoinOperator, QBJoinTree> smbMapJoinContext;
  // Table scan -> metastore Table object.
  private final HashMap<TableScanOperator, Table> topToTable;
  // File sink -> destination table (for sinks that write to a table).
  private final Map<FileSinkOperator, Table> fsopToTable;
  // Reduce sinks inserted solely to enforce bucketing/sorting on insert.
  private final List<ReduceSinkOperator> reduceSinkOperatorsAddedByEnforceBucketingSorting;
  // Table scan -> table properties supplied in the query (TBLPROPERTIES on tabref).
  private final HashMap<TableScanOperator, Map<String, String>> topToTableProps;
  // Current query block and its AST.
  private QB qb;
  private ASTNode ast;
  // Monotonically increasing id assigned to each destination table.
  private int destTableId;
  // Context for union processing.
  private UnionProcContext uCtx;
  List<AbstractMapJoinOperator<? extends MapJoinDesc>> listMapJoinOpsNoReducer;
  // Bucket-sample pruner per table scan.
  private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
  // Skew-based (list bucketing) pruning expressions per table scan and partition.
  private final Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner;
  /**
   * a map for the split sampling, from alias to an instance of SplitSample
   * that describes percentage and number.
   */
  private final HashMap<String, SplitSample> nameToSplitSample;
  Map<GroupByOperator, Set<String>> groupOpToInputTables;
  Map<String, PrunedPartitionList> prunedPartitions;
  // Schema of the query result, populated after plan generation.
  private List<FieldSchema> resultSchema;
  // Set when this analysis is for a CREATE VIEW statement.
  private CreateViewDesc createVwDesc;
  private ArrayList<String> viewsExpanded;
  private ASTNode viewSelect;
  // Tracks token rewrites needed to regenerate the (expanded) query text.
  private final UnparseTranslator unparseTranslator;
  private final GlobalLimitCtx globalLimitCtx = new GlobalLimitCtx();
  // prefix for column names auto generated by hive
  private final String autogenColAliasPrfxLbl;
  private final boolean autogenColAliasPrfxIncludeFuncName;
  // Keep track of view alias to read entity corresponding to the view
  // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
  // keeps track of aliases for V3, V3:V2, V3:V2:V1.
  // This is used when T is added as an input for the query, the parents of T is
  // derived from the alias V3:V2:V1:T
  private final Map<String, ReadEntity> viewAliasToInput = new HashMap<String, ReadEntity>();
  // Max characters when auto generating the column name with func name
  private static final int AUTOGEN_COLALIAS_PRFX_MAXLENGTH = 20;
  // flag for no scan during analyze ... compute statistics
  protected boolean noscan = false;
  //flag for partial scan during analyze ... compute statistics
  protected boolean partialscan = false;
  // Whether the cost-based optimizer (Optiq) path should be attempted.
  private volatile boolean runCBO = true;
  private volatile boolean disableJoinMerge = false;
  /*
   * Capture the CTE definitions in a Query.
   */
  private final Map<String, ASTNode> aliasToCTEs;
  /*
   * Used to check recursive CTE invocations. Similar to viewsExpanded
   */
  private ArrayList<String> ctesExpanded;

  /** Mutable cursor used while walking a query block in phase 1. */
  private static class Phase1Ctx {
    String dest;   // current destination clause name
    int nextNum;   // counter for generating unique destination names
  }
/**
 * Constructor for callers that need to control whether the cost-based
 * optimizer (Optiq) path is attempted for this query.
 *
 * @param conf   Hive configuration
 * @param runCBO whether to attempt cost-based optimization
 * @throws SemanticException if base initialization fails
 */
protected SemanticAnalyzer(HiveConf conf, boolean runCBO) throws SemanticException {
  this(conf);
  this.runCBO = runCBO;
}
/**
 * Creates a semantic analyzer and initializes all per-query bookkeeping
 * structures (pruners, operator maps, load work lists, CTE map, etc.).
 *
 * @param conf Hive configuration; also supplies the auto-generated column
 *             alias prefix settings read below
 * @throws SemanticException if base initialization fails
 */
public SemanticAnalyzer(HiveConf conf) throws SemanticException {
  super(conf);
  opToPartPruner = new HashMap<TableScanOperator, ExprNodeDesc>();
  opToPartList = new HashMap<TableScanOperator, PrunedPartitionList>();
  opToSamplePruner = new HashMap<TableScanOperator, sampleDesc>();
  nameToSplitSample = new HashMap<String, SplitSample>();
  topOps = new HashMap<String, Operator<? extends OperatorDesc>>();
  topSelOps = new HashMap<String, Operator<? extends OperatorDesc>>();
  loadTableWork = new ArrayList<LoadTableDesc>();
  loadFileWork = new ArrayList<LoadFileDesc>();
  // LinkedHashMap: operator creation order matters for later traversals.
  opParseCtx = new LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext>();
  joinContext = new HashMap<JoinOperator, QBJoinTree>();
  smbMapJoinContext = new HashMap<SMBMapJoinOperator, QBJoinTree>();
  topToTable = new HashMap<TableScanOperator, Table>();
  fsopToTable = new HashMap<FileSinkOperator, Table>();
  reduceSinkOperatorsAddedByEnforceBucketingSorting = new ArrayList<ReduceSinkOperator>();
  topToTableProps = new HashMap<TableScanOperator, Map<String, String>>();
  // Destination table ids start at 1.
  destTableId = 1;
  uCtx = null;
  listMapJoinOpsNoReducer = new ArrayList<AbstractMapJoinOperator<? extends MapJoinDesc>>();
  groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
  prunedPartitions = new HashMap<String, PrunedPartitionList>();
  unparseTranslator = new UnparseTranslator(conf);
  autogenColAliasPrfxLbl = HiveConf.getVar(conf,
      HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL);
  autogenColAliasPrfxIncludeFuncName = HiveConf.getBoolVar(conf,
      HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME);
  queryProperties = new QueryProperties();
  opToPartToSkewedPruner = new HashMap<TableScanOperator, Map<String, ExprNodeDesc>>();
  aliasToCTEs = new HashMap<String, ASTNode>();
}
/**
 * Resets all per-query state so this analyzer instance can be reused for
 * another query. Final collections are cleared in place; nullable references
 * are reset to null and counters to their initial values.
 */
@Override
protected void reset() {
  super.reset();
  loadTableWork.clear();
  loadFileWork.clear();
  topOps.clear();
  topSelOps.clear();
  destTableId = 1;
  idToTableNameMap.clear();
  qb = null;
  ast = null;
  uCtx = null;
  joinContext.clear();
  smbMapJoinContext.clear();
  opParseCtx.clear();
  groupOpToInputTables.clear();
  prunedPartitions.clear();
  disableJoinMerge = false;
  aliasToCTEs.clear();
  topToTable.clear();
  opToPartPruner.clear();
  opToPartList.clear();
  opToPartToSkewedPruner.clear();
  opToSamplePruner.clear();
  nameToSplitSample.clear();
  fsopToTable.clear();
  resultSchema = null;
  createVwDesc = null;
  viewsExpanded = null;
  viewSelect = null;
  ctesExpanded = null;
  globalLimitCtx.disableOpt();
  viewAliasToInput.clear();
  reduceSinkOperatorsAddedByEnforceBucketingSorting.clear();
  topToTableProps.clear();
  listMapJoinOpsNoReducer.clear();
  unparseTranslator.clear();
  queryProperties.clear();
  outputs.clear();
}
/**
 * Adopts the state of an existing {@link ParseContext}, replacing this
 * analyzer's bookkeeping structures with the ones from pctx. Used when
 * re-entering semantic analysis with a previously built context.
 *
 * @param pctx parse context whose structures this analyzer should share
 */
public void initParseCtx(ParseContext pctx) {
  opToPartPruner = pctx.getOpToPartPruner();
  opToPartList = pctx.getOpToPartList();
  opToSamplePruner = pctx.getOpToSamplePruner();
  topOps = pctx.getTopOps();
  topSelOps = pctx.getTopSelOps();
  opParseCtx = pctx.getOpParseCtx();
  loadTableWork = pctx.getLoadTableWork();
  loadFileWork = pctx.getLoadFileWork();
  joinContext = pctx.getJoinContext();
  smbMapJoinContext = pctx.getSmbMapJoinContext();
  ctx = pctx.getContext();
  destTableId = pctx.getDestTableId();
  idToTableNameMap = pctx.getIdToTableNameMap();
  uCtx = pctx.getUCtx();
  listMapJoinOpsNoReducer = pctx.getListMapJoinOpsNoReducer();
  qb = pctx.getQB();
  groupOpToInputTables = pctx.getGroupOpToInputTables();
  prunedPartitions = pctx.getPrunedPartitions();
  fetchTask = pctx.getFetchTask();
  setLineageInfo(pctx.getLineageInfo());
}
/**
 * Packages this analyzer's current state into a new {@link ParseContext}.
 * Note the ParseContext constructor is positional — keep the argument order
 * in sync with its signature when adding fields.
 *
 * @return a parse context sharing this analyzer's bookkeeping structures
 */
public ParseContext getParseContext() {
  return new ParseContext(conf, qb, ast, opToPartPruner, opToPartList, topOps,
      topSelOps, opParseCtx, joinContext, smbMapJoinContext, topToTable, topToTableProps,
      fsopToTable, loadTableWork,
      loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
      listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
      opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
      opToPartToSkewedPruner, viewAliasToInput,
      reduceSinkOperatorsAddedByEnforceBucketingSorting,
      queryProperties);
}
/**
 * Phase-1 analysis of a query expression (the body of a subquery or one side
 * of a UNION). A TOK_QUERY node is analyzed directly into a new QB; a
 * TOK_UNION node recurses into both children, qualifying their ids/aliases
 * with "-subquery1"/"-subquery2".
 *
 * @param ast    root of the query expression (TOK_QUERY or TOK_UNION)
 * @param qbexpr the QBExpr to populate with the analysis result
 * @param id     id of the enclosing query block, used to qualify sub-ids
 * @param alias  alias under which this expression is referenced
 * @throws SemanticException if analysis of a child query fails
 */
@SuppressWarnings("nls")
public void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias)
    throws SemanticException {
  assert (ast.getToken() != null);
  switch (ast.getToken().getType()) {
  case HiveParser.TOK_QUERY: {
    QB qb = new QB(id, alias, true);
    Phase1Ctx ctx_1 = initPhase1Ctx();
    doPhase1(ast, qb, ctx_1);
    qbexpr.setOpcode(QBExpr.Opcode.NULLOP);
    qbexpr.setQB(qb);
  }
    break;
  case HiveParser.TOK_UNION: {
    qbexpr.setOpcode(QBExpr.Opcode.UNION);
    // query 1
    assert (ast.getChild(0) != null);
    QBExpr qbexpr1 = new QBExpr(alias + "-subquery1");
    doPhase1QBExpr((ASTNode) ast.getChild(0), qbexpr1, id + "-subquery1",
        alias + "-subquery1");
    qbexpr.setQBExpr1(qbexpr1);
    // query 2
    // Fix: previously re-asserted child 0, so a UNION missing its second
    // branch passed the assert and NPE'd on the cast below. Check child 1.
    assert (ast.getChild(1) != null);
    QBExpr qbexpr2 = new QBExpr(alias + "-subquery2");
    doPhase1QBExpr((ASTNode) ast.getChild(1), qbexpr2, id + "-subquery2",
        alias + "-subquery2");
    qbexpr.setQBExpr2(qbexpr2);
  }
    break;
  }
}
/**
 * Collects all aggregation subtrees from a SELECT expression list. Plain
 * (non-windowed) aggregations are returned keyed by their toStringTree()
 * form to de-duplicate identical trees; window-function invocations are
 * instead registered on the QB's WindowingSpec for the destination clause.
 *
 * @param selExpr the TOK_SELECT node whose children are select expressions
 * @param qb      current query block (receives the windowing spec)
 * @param dest    destination clause name the aggregations belong to
 * @return map from tree string to aggregation AST (insertion-ordered)
 * @throws SemanticException on invalid aggregation usage
 */
private LinkedHashMap<String, ASTNode> doPhase1GetAggregationsFromSelect(
    ASTNode selExpr, QB qb, String dest) throws SemanticException {
  // Iterate over the selects search for aggregation Trees.
  // Use String as keys to eliminate duplicate trees.
  LinkedHashMap<String, ASTNode> aggregationTrees = new LinkedHashMap<String, ASTNode>();
  List<ASTNode> wdwFns = new ArrayList<ASTNode>();
  for (int i = 0; i < selExpr.getChildCount(); ++i) {
    ASTNode function = (ASTNode) selExpr.getChild(i).getChild(0);
    doPhase1GetAllAggregations(function, aggregationTrees, wdwFns);
  }
  // window based aggregations are handled differently
  for (ASTNode wdwFn : wdwFns) {
    WindowingSpec spec = qb.getWindowingSpec(dest);
    if(spec == null) {
      // First window function seen for this destination: create the spec.
      queryProperties.setHasWindowing(true);
      spec = new WindowingSpec();
      qb.addDestToWindowingSpec(dest, spec);
    }
    HashMap<String, ASTNode> wExprsInDest = qb.getParseInfo().getWindowingExprsForClause(dest);
    // Index used to generate the synthetic output column alias (_wcolN).
    int wColIdx = spec.getWindowExpressions() == null ? 0 : spec.getWindowExpressions().size();
    WindowFunctionSpec wFnSpec = processWindowFunction(wdwFn,
        (ASTNode)wdwFn.getChild(wdwFn.getChildCount()-1));
    // If this is a duplicate invocation of a function; don't add to WindowingSpec.
    if ( wExprsInDest != null &&
        wExprsInDest.containsKey(wFnSpec.getExpression().toStringTree())) {
      continue;
    }
    wFnSpec.setAlias("_wcol" + wColIdx);
    spec.addWindowFunction(wFnSpec);
    qb.getParseInfo().addWindowingExprToClause(dest, wFnSpec.getExpression());
  }
  return aggregationTrees;
}
/**
 * Records the explicit column aliases ("expr AS alias") found in a SELECT
 * expression list on the parse info, so later phases can resolve references
 * to the alias back to the aliased expression.
 *
 * @param selectExpr the TOK_SELECT node whose children are select expressions
 * @param qbp        parse info receiving the expression-to-alias mapping
 */
private void doPhase1GetColumnAliasesFromSelect(
    ASTNode selectExpr, QBParseInfo qbp) {
  int numItems = selectExpr.getChildCount();
  for (int idx = 0; idx < numItems; ++idx) {
    ASTNode item = (ASTNode) selectExpr.getChild(idx);
    // Only a TOK_SELEXPR with exactly two children carries an alias:
    // child 0 is the expression, child 1 is the alias identifier.
    if (item.getToken().getType() != HiveParser.TOK_SELEXPR
        || item.getChildCount() != 2) {
      continue;
    }
    String columnAlias = unescapeIdentifier(item.getChild(1).getText());
    qbp.setExprToColumnAlias((ASTNode) item.getChild(0), columnAlias);
  }
}
/**
 * DFS-scan the expressionTree to find all aggregation subtrees and put them
 * in aggregations.
 *
 * @param expressionTree expression AST to scan
 * @param aggregations
 *          the key to the HashTable is the toStringTree() representation of
 *          the aggregation subtree.
 * @param wdwFns collects window-function invocations (TOK_WINDOWSPEC as last
 *          child) instead of adding them to {@code aggregations}
 * @throws SemanticException if an ordering UDAF or lead/lag UDF is used
 *           without an OVER clause
 */
private void doPhase1GetAllAggregations(ASTNode expressionTree,
    HashMap<String, ASTNode> aggregations, List<ASTNode> wdwFns) throws SemanticException {
  int exprTokenType = expressionTree.getToken().getType();
  if (exprTokenType == HiveParser.TOK_FUNCTION
      || exprTokenType == HiveParser.TOK_FUNCTIONDI
      || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
    assert (expressionTree.getChildCount() != 0);
    // A trailing TOK_WINDOWSPEC marks a window-function call; these are
    // routed to wdwFns and handled separately by the caller.
    if (expressionTree.getChild(expressionTree.getChildCount()-1).getType()
        == HiveParser.TOK_WINDOWSPEC) {
      wdwFns.add(expressionTree);
      return;
    }
    if (expressionTree.getChild(0).getType() == HiveParser.Identifier) {
      String functionName = unescapeIdentifier(expressionTree.getChild(0)
          .getText());
      // Functions that imply ordering (e.g. rank) require an OVER clause.
      if(FunctionRegistry.impliesOrder(functionName)) {
        throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
      }
      if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
        // Lead/lag may only appear inside a windowed invocation.
        if(containsLeadLagUDF(expressionTree)) {
          throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
        }
        aggregations.put(expressionTree.toStringTree(), expressionTree);
        FunctionInfo fi = FunctionRegistry.getFunctionInfo(functionName);
        if (!fi.isNative()) {
          // Non-native (user-registered) function names may need translation
          // when the query text is regenerated.
          unparseTranslator.addIdentifierTranslation((ASTNode) expressionTree
              .getChild(0));
        }
        // An aggregation subtree is not scanned further: nested aggregates
        // inside it are not collected here.
        return;
      }
    }
  }
  // Not an aggregation at this node: recurse into all children.
  for (int i = 0; i < expressionTree.getChildCount(); i++) {
    doPhase1GetAllAggregations((ASTNode) expressionTree.getChild(i),
        aggregations, wdwFns);
  }
}
/**
 * Filters the collected aggregation trees down to the DISTINCT aggregations
 * (TOK_FUNCTIONDI), preserving the map's insertion order.
 *
 * @param aggregationTrees aggregations keyed by their toStringTree() form
 * @return the DISTINCT aggregation subtrees, in encounter order
 * @throws SemanticException declared for interface consistency
 */
private List<ASTNode> doPhase1GetDistinctFuncExprs(
    HashMap<String, ASTNode> aggregationTrees) throws SemanticException {
  List<ASTNode> distinctExprs = new ArrayList<ASTNode>();
  for (ASTNode tree : aggregationTrees.values()) {
    assert (tree != null);
    if (tree.getToken().getType() == HiveParser.TOK_FUNCTIONDI) {
      distinctExprs.add(tree);
    }
  }
  return distinctExprs;
}
/**
 * Builds an error message of the form
 * "line:col message. Error encountered near token 'tok'", locating the
 * problem at the given AST node's position in the query text.
 *
 * @param ast     node whose line/column anchor the message
 * @param message description of the error
 * @return the formatted error message
 */
public static String generateErrorMessage(ASTNode ast, String message) {
  return ast.getLine() + ":" + ast.getCharPositionInLine() + " " + message
      + ". Error encountered near token '" + ErrorMsg.getText(ast) + "'";
}
/**
 * Goes though the tabref tree and finds the alias for the table. Once found,
 * it records the table name-> alias association in aliasToTabs. It also makes
 * an association from the alias to the table AST in parse info.
 *
 * @return the alias of the table
 */
private String processTable(QB qb, ASTNode tabref) throws SemanticException {
  // For each table reference get the table name
  // and the alias (if alias is not present, the table name
  // is used as an alias)
  int aliasIndex = 0;
  int propsIndex = -1;
  int tsampleIndex = -1;
  int ssampleIndex = -1;
  // Classify the optional children of TOK_TABREF: bucket sample, split
  // sample, table properties — anything else is taken as the alias.
  for (int index = 1; index < tabref.getChildCount(); index++) {
    ASTNode ct = (ASTNode) tabref.getChild(index);
    if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE) {
      tsampleIndex = index;
    } else if (ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE) {
      ssampleIndex = index;
    } else if (ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
      propsIndex = index;
    } else {
      aliasIndex = index;
    }
  }
  ASTNode tableTree = (ASTNode) (tabref.getChild(0));
  String tabIdName = getUnescapedName(tableTree);
  String alias;
  // Explicit alias if present, otherwise the unqualified table name.
  if (aliasIndex != 0) {
    alias = unescapeIdentifier(tabref.getChild(aliasIndex).getText());
  }
  else {
    alias = getUnescapedUnqualifiedTableName(tableTree);
  }
  if (propsIndex >= 0) {
    Tree propsAST = tabref.getChild(propsIndex);
    Map<String, String> props = DDLSemanticAnalyzer.getProps((ASTNode) propsAST.getChild(0));
    qb.setTabProps(alias, props);
  }
  // If the alias is already there then we have a conflict
  if (qb.exists(alias)) {
    throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(tabref
        .getChild(aliasIndex)));
  }
  if (tsampleIndex >= 0) {
    // TABLESAMPLE (BUCKET x OUT OF y [ON cols]): children 0 and 1 are the
    // numerator/denominator, any further children are the sampling columns.
    ASTNode sampleClause = (ASTNode) tabref.getChild(tsampleIndex);
    ArrayList<ASTNode> sampleCols = new ArrayList<ASTNode>();
    if (sampleClause.getChildCount() > 2) {
      for (int i = 2; i < sampleClause.getChildCount(); i++) {
        sampleCols.add((ASTNode) sampleClause.getChild(i));
      }
    }
    // TODO: For now only support sampling on up to two columns
    // Need to change it to list of columns
    if (sampleCols.size() > 2) {
      throw new SemanticException(generateErrorMessage(
          (ASTNode) tabref.getChild(0),
          ErrorMsg.SAMPLE_RESTRICTION.getMsg()));
    }
    qb.getParseInfo().setTabSample(
        alias,
        new TableSample(
            unescapeIdentifier(sampleClause.getChild(0).getText()),
            unescapeIdentifier(sampleClause.getChild(1).getText()),
            sampleCols));
    if (unparseTranslator.isEnabled()) {
      for (ASTNode sampleCol : sampleCols) {
        unparseTranslator.addIdentifierTranslation((ASTNode) sampleCol
            .getChild(0));
      }
    }
  } else if (ssampleIndex >= 0) {
    // Split sample: TABLESAMPLE (n PERCENT | n ROWS | <size>[kKmMgG]).
    ASTNode sampleClause = (ASTNode) tabref.getChild(ssampleIndex);
    Tree type = sampleClause.getChild(0);
    Tree numerator = sampleClause.getChild(1);
    String value = unescapeIdentifier(numerator.getText());
    SplitSample sample;
    if (type.getType() == HiveParser.TOK_PERCENT) {
      // Percentage sampling requires CombineHiveInputFormat.
      assertCombineInputFormat(numerator, "Percentage");
      Double percent = Double.valueOf(value).doubleValue();
      if (percent < 0 || percent > 100) {
        throw new SemanticException(generateErrorMessage((ASTNode) numerator,
            "Sampling percentage should be between 0 and 100"));
      }
      int seedNum = conf.getIntVar(ConfVars.HIVESAMPLERANDOMNUM);
      sample = new SplitSample(percent, seedNum);
    } else if (type.getType() == HiveParser.TOK_ROWCOUNT) {
      sample = new SplitSample(Integer.valueOf(value));
    } else {
      assert type.getType() == HiveParser.TOK_LENGTH;
      assertCombineInputFormat(numerator, "Total Length");
      // NOTE(review): the numeric part is parsed as int before widening to
      // long, so values above Integer.MAX_VALUE (pre-suffix) would throw a
      // NumberFormatException — confirm that is the intended limit.
      long length = Integer.valueOf(value.substring(0, value.length() - 1));
      char last = value.charAt(value.length() - 1);
      if (last == 'k' || last == 'K') {
        length <<= 10;
      } else if (last == 'm' || last == 'M') {
        length <<= 20;
      } else if (last == 'g' || last == 'G') {
        length <<= 30;
      }
      int seedNum = conf.getIntVar(ConfVars.HIVESAMPLERANDOMNUM);
      sample = new SplitSample(length, seedNum);
    }
    String alias_id = getAliasId(alias, qb);
    nameToSplitSample.put(alias_id, sample);
  }
  // Insert this map into the stats
  qb.setTabAlias(alias, tabIdName);
  qb.addAlias(alias);
  qb.getParseInfo().setSrcForAlias(alias, tableTree);
  // Register translations so the unparser can re-emit qualified names.
  unparseTranslator.addTableNameTranslation(tableTree, SessionState.get().getCurrentDatabase());
  if (aliasIndex != 0) {
    unparseTranslator.addIdentifierTranslation((ASTNode) tabref
        .getChild(aliasIndex));
  }
  return alias;
}
/**
 * Verifies that the configured input format is CombineHiveInputFormat, which
 * is required for percentage and total-length split sampling. The relevant
 * config key depends on the execution engine (tez vs. mr).
 *
 * @param numerator AST node used to locate the error in the query text
 * @param message   sampling mode name used in the error message
 * @throws SemanticException if a different input format is configured
 */
private void assertCombineInputFormat(Tree numerator, String message) throws SemanticException {
  String inputFormat;
  if (conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
    inputFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVETEZINPUTFORMAT);
  } else {
    inputFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEINPUTFORMAT);
  }
  if (!inputFormat.equals(CombineHiveInputFormat.class.getName())) {
    throw new SemanticException(generateErrorMessage((ASTNode) numerator,
        message + " sampling is not supported in " + inputFormat));
  }
}
/**
 * Registers a subquery in the FROM clause on the query block: runs phase-1
 * analysis on the subquery body and records it under its (mandatory) alias.
 *
 * @param qb   enclosing query block
 * @param subq TOK_SUBQUERY node; child 0 is the query, child 1 the alias
 * @return the subquery's alias
 * @throws SemanticException if the alias is missing or already in use
 */
private String processSubQuery(QB qb, ASTNode subq) throws SemanticException {
  // This is a subquery and must have an alias
  if (subq.getChildCount() != 2) {
    throw new SemanticException(ErrorMsg.NO_SUBQUERY_ALIAS.getMsg(subq));
  }
  ASTNode subqref = (ASTNode) subq.getChild(0);
  String alias = unescapeIdentifier(subq.getChild(1).getText());
  // Recursively do the first phase of semantic analysis for the subquery
  QBExpr qbexpr = new QBExpr(alias);
  doPhase1QBExpr(subqref, qbexpr, qb.getId(), alias);
  // If the alias is already there then we have a conflict
  if (qb.exists(alias)) {
    throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(subq
        .getChild(1)));
  }
  // Insert this map into the stats
  qb.setSubqAlias(alias, qbexpr);
  qb.addAlias(alias);
  unparseTranslator.addIdentifierTranslation((ASTNode) subq.getChild(1));
  return alias;
}
/*
 * Phase1: hold onto any CTE definitions in aliasToCTE.
 * CTE definitions are global to the Query.
 */
private void processCTE(QB qb, ASTNode ctes) throws SemanticException {
  int numCTEs = ctes.getChildCount();
  for(int i=0; i <numCTEs; i++) {
    // Each child is one CTE: child 0 is the query body, child 1 the name.
    ASTNode cte = (ASTNode) ctes.getChild(i);
    ASTNode cteQry = (ASTNode) cte.getChild(0);
    String alias = unescapeIdentifier(cte.getChild(1).getText());
    // Qualify the CTE name with the QB id so same-named CTEs at different
    // nesting levels don't collide (see findCTEFromName's scoping rules).
    String qName = qb.getId() == null ? "" : qb.getId() + ":";
    qName += alias.toLowerCase();
    if ( aliasToCTEs.containsKey(qName)) {
      throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(cte.getChild(1)));
    }
    aliasToCTEs.put(qName, cteQry);
  }
}
/*
 * We allow CTE definitions in views. So we can end up with a hierarchy of CTE definitions:
 * - at the top level of a query statement
 * - where a view is referenced.
 * - views may refer to other views.
 *
 * The scoping rules we use are: to search for a CTE from the current QB outwards. In order to
 * disambiguate between CTES are different levels we qualify(prefix) them with the id of the QB
 * they appear in when adding them to the <code>aliasToCTEs</code> map.
 *
 */
private ASTNode findCTEFromName(QB qb, String cteName) {
  /*
   * When saving a view definition all table references in the AST are qualified; including CTE references.
   * Where as CTE definitions have no DB qualifier; so we strip out the DB qualifier before searching in
   * <code>aliasToCTEs</code> map.
   */
  String currDB = SessionState.get().getCurrentDatabase();
  if ( currDB != null && cteName.startsWith(currDB) &&
      cteName.length() > currDB.length() &&
      cteName.charAt(currDB.length()) == '.' ) {
    cteName = cteName.substring(currDB.length() + 1);
  }
  // Walk outwards from the current QB's scope: try the fully qualified name
  // first, then repeatedly drop the innermost qualifier segment.
  StringBuffer qId = new StringBuffer();
  if (qb.getId() != null) {
    qId.append(qb.getId());
  }
  while (qId.length() > 0) {
    String nm = qId + ":" + cteName;
    if (aliasToCTEs.containsKey(nm)) {
      return aliasToCTEs.get(nm);
    }
    int lastIndex = qId.lastIndexOf(":");
    lastIndex = lastIndex < 0 ? 0 : lastIndex;
    qId.setLength(lastIndex);
  }
  // Finally, fall back to the top-level (unqualified) CTE name;
  // returns null if no matching CTE exists.
  return aliasToCTEs.get(cteName);
}
/*
 * If a CTE is referenced in a QueryBlock:
 * - add it as a SubQuery for now.
 *   - SQ.alias is the alias used in QB. (if no alias is specified,
 *     it used the CTE name. Works just like table references)
 *   - Adding SQ done by:
 *     - copying AST of CTE
 *     - setting ASTOrigin on cloned AST.
 *   - trigger phase 1 on new QBExpr.
 *   - update QB data structs: remove this as a table reference, move it to a SQ invocation.
 */
private void addCTEAsSubQuery(QB qb, String cteName, String cteAlias) throws SemanticException {
  cteAlias = cteAlias == null ? cteName : cteAlias;
  ASTNode cteQryNode = findCTEFromName(qb, cteName);
  QBExpr cteQBExpr = new QBExpr(cteAlias);
  // Capture the original CTE text so errors inside the clone can point back
  // at the CTE definition via ASTNodeOrigin.
  String cteText = ctx.getTokenRewriteStream().toString(
      cteQryNode.getTokenStartIndex(), cteQryNode.getTokenStopIndex());
  final ASTNodeOrigin cteOrigin = new ASTNodeOrigin("CTE", cteName,
      cteText, cteAlias, cteQryNode);
  // Work on a deep copy: the same CTE may be referenced more than once.
  cteQryNode = (ASTNode) ParseDriver.adaptor.dupTree(cteQryNode);
  SubQueryUtils.setOriginDeep(cteQryNode, cteOrigin);
  doPhase1QBExpr(cteQryNode, cteQBExpr, qb.getId(), cteAlias);
  qb.rewriteCTEToSubq(cteAlias, cteName, cteQBExpr);
}
/**
 * Returns whether the given AST node's token denotes any join operator
 * (inner, cross, left/right/full outer, left semi or unique join).
 *
 * @param node AST node to test; must carry a token
 * @return true iff the node is one of the join token types
 */
private boolean isJoinToken(ASTNode node) {
  // Fetch the token type once (the original re-evaluated it seven times)
  // and let a switch express the membership test directly.
  switch (node.getToken().getType()) {
  case HiveParser.TOK_JOIN:
  case HiveParser.TOK_CROSSJOIN:
  case HiveParser.TOK_LEFTOUTERJOIN:
  case HiveParser.TOK_RIGHTOUTERJOIN:
  case HiveParser.TOK_FULLOUTERJOIN:
  case HiveParser.TOK_LEFTSEMIJOIN:
  case HiveParser.TOK_UNIQUEJOIN:
    return true;
  default:
    return false;
  }
}
/**
* Given the AST with TOK_JOIN as the root, get all the aliases for the tables
* or subqueries in the join.
*
* @param qb
* @param join
* @throws SemanticException
*/
@SuppressWarnings("nls")
private void processJoin(QB qb, ASTNode join) throws SemanticException {
  int numChildren = join.getChildCount();
  // A binary join has 2 children (plus an optional condition, 3); a unique
  // join may have more.
  if ((numChildren != 2) && (numChildren != 3)
      && join.getToken().getType() != HiveParser.TOK_UNIQUEJOIN) {
    throw new SemanticException(generateErrorMessage(join,
        "Join with multiple children"));
  }

  for (int num = 0; num < numChildren; num++) {
    ASTNode child = (ASTNode) join.getChild(num);
    // Evaluate the token type once per child instead of per comparison.
    int childType = child.getToken().getType();
    if (childType == HiveParser.TOK_TABREF) {
      processTable(qb, child);
    } else if (childType == HiveParser.TOK_SUBQUERY) {
      processSubQuery(qb, child);
    } else if (childType == HiveParser.TOK_PTBLFUNCTION) {
      queryProperties.setHasPTF(true);
      processPTF(qb, child);
      PTFInvocationSpec ptfInvocationSpec = qb.getPTFInvocationSpec(child);
      String inputAlias = ptfInvocationSpec == null ? null :
          ptfInvocationSpec.getFunction().getAlias();
      if (inputAlias == null) {
        throw new SemanticException(generateErrorMessage(child,
            "PTF invocation in a Join must have an alias"));
      }
    } else if (childType == HiveParser.TOK_LATERAL_VIEW ||
        childType == HiveParser.TOK_LATERAL_VIEW_OUTER) {
      // SELECT * FROM src1 LATERAL VIEW udtf() AS myTable JOIN src2 ...
      // is not supported. Instead, the lateral view must be in a subquery
      // SELECT * FROM (SELECT * FROM src1 LATERAL VIEW udtf() AS myTable) a
      // JOIN src2 ...
      throw new SemanticException(ErrorMsg.LATERAL_VIEW_WITH_JOIN
          .getMsg(join));
    } else if (isJoinToken(child)) {
      // Nested join: recurse to collect aliases of the inner join's sources.
      processJoin(qb, child);
    }
  }
}
/**
* Given the AST with TOK_LATERAL_VIEW as the root, get the alias for the
* table or subquery in the lateral view and also make a mapping from the
* alias to all the lateral view AST's.
*
* @param qb
* @param lateralView
* @return the alias for the table/subquery
* @throws SemanticException
*/
/**
 * Registers the table/sub-query underlying a lateral view, records the
 * lateral view AST for its alias, and returns the (lower-cased) alias.
 *
 * @param qb          enclosing query block
 * @param lateralView AST rooted at TOK_LATERAL_VIEW[_OUTER]
 * @return the lower-cased alias of the underlying table or sub-query
 * @throws SemanticException if child 1 is not a table ref, sub-query or
 *         nested lateral view
 */
private String processLateralView(QB qb, ASTNode lateralView)
    throws SemanticException {
  // A lateral view node has exactly two children; child 1 is the source.
  assert (lateralView.getChildCount() == 2);
  ASTNode source = (ASTNode) lateralView.getChild(1);

  final String rawAlias;
  switch (source.getToken().getType()) {
  case HiveParser.TOK_TABREF:
    rawAlias = processTable(qb, source);
    break;
  case HiveParser.TOK_SUBQUERY:
    rawAlias = processSubQuery(qb, source);
    break;
  case HiveParser.TOK_LATERAL_VIEW:
  case HiveParser.TOK_LATERAL_VIEW_OUTER:
    // Lateral views may be chained; resolve the alias of the inner one.
    rawAlias = processLateralView(qb, source);
    break;
  default:
    throw new SemanticException(ErrorMsg.LATERAL_VIEW_INVALID_CHILD
        .getMsg(lateralView));
  }

  // Record the lateral view AST under the lower-cased alias and register
  // the alias with the query block.
  String alias = rawAlias.toLowerCase();
  qb.getParseInfo().addLateralViewForAlias(alias, lateralView);
  qb.addAlias(alias);
  return alias;
}
/**
* Phase 1: (including, but not limited to):
*
* 1. Gets all the aliases for all the tables / subqueries and makes the
* appropriate mapping in aliasToTabs, aliasToSubq 2. Gets the location of the
* destination and names the clause "inclause" + i 3. Creates a map from a
* string representation of an aggregation tree to the actual aggregation AST
* 4. Creates a mapping from the clause name to the select expression AST in
* destToSelExpr 5. Creates a mapping from a table alias to the lateral view
* AST's in aliasToLateralViews
*
* @param ast
* @param qb
* @param ctx_1
* @throws SemanticException
*/
@SuppressWarnings({"fallthrough", "nls"})
public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1)
    throws SemanticException {

  boolean phase1Result = true;
  QBParseInfo qbp = qb.getParseInfo();
  // Each case decides whether the walk should recurse into this node's
  // children; the default (unrecognized token) is to keep recursing.
  boolean skipRecursion = false;

  if (ast.getToken() != null) {
    skipRecursion = true;
    switch (ast.getToken().getType()) {
    case HiveParser.TOK_SELECTDI:
      qb.countSelDi();
      // fall through
    case HiveParser.TOK_SELECT:
      qb.countSel();
      qbp.setSelExprForClause(ctx_1.dest, ast);

      // An optional hint list appears as the first child of the select.
      if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_HINTLIST) {
        qbp.setHints((ASTNode) ast.getChild(0));
      }

      // Collect aggregation expressions (keyed by their string form) and
      // the distinct-aggregation subset for this destination clause.
      LinkedHashMap<String, ASTNode> aggregations = doPhase1GetAggregationsFromSelect(ast,
          qb, ctx_1.dest);
      doPhase1GetColumnAliasesFromSelect(ast, qbp);
      qbp.setAggregationExprsForClause(ctx_1.dest, aggregations);
      qbp.setDistinctFuncExprsForClause(ctx_1.dest,
          doPhase1GetDistinctFuncExprs(aggregations));
      break;

    case HiveParser.TOK_WHERE:
      qbp.setWhrExprForClause(ctx_1.dest, ast);
      // Remember whether the WHERE clause contains sub-queries.
      if (!SubQueryUtils.findSubQueries((ASTNode) ast.getChild(0)).isEmpty())
        queryProperties.setFilterWithSubQuery(true);
      break;

    case HiveParser.TOK_INSERT_INTO:
      // Record the (DB-qualified) target table of the INSERT INTO.
      String currentDatabase = SessionState.get().getCurrentDatabase();
      String tab_name = getUnescapedName((ASTNode) ast.getChild(0).getChild(0), currentDatabase);
      qbp.addInsertIntoTable(tab_name);
      // TODO: is this supposed to fall thru?
      // (no break: control continues into TOK_DESTINATION, so INSERT INTO
      // also registers a destination clause below)

    case HiveParser.TOK_DESTINATION:
      // Name each destination clause "insclause-<i>".
      ctx_1.dest = "insclause-" + ctx_1.nextNum;
      ctx_1.nextNum++;

      // is there a insert in the subquery
      if (qbp.getIsSubQ()) {
        ASTNode ch = (ASTNode) ast.getChild(0);
        // Sub-queries may only write to a TOK_TMP_FILE directory.
        if ((ch.getToken().getType() != HiveParser.TOK_DIR)
            || (((ASTNode) ch.getChild(0)).getToken().getType() != HiveParser.TOK_TMP_FILE)) {
          throw new SemanticException(ErrorMsg.NO_INSERT_INSUBQUERY
              .getMsg(ast));
        }
      }

      qbp.setDestForClause(ctx_1.dest, (ASTNode) ast.getChild(0));

      if (qbp.getClauseNamesForDest().size() > 1)
        queryProperties.setMultiDestQuery(true);
      break;

    case HiveParser.TOK_FROM:
      int child_count = ast.getChildCount();
      if (child_count != 1) {
        throw new SemanticException(generateErrorMessage(ast,
            "Multiple Children " + child_count));
      }

      // Check if this is a subquery / lateral view
      ASTNode frm = (ASTNode) ast.getChild(0);
      if (frm.getToken().getType() == HiveParser.TOK_TABREF) {
        processTable(qb, frm);
      } else if (frm.getToken().getType() == HiveParser.TOK_SUBQUERY) {
        processSubQuery(qb, frm);
      } else if (frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW ||
          frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW_OUTER) {
        processLateralView(qb, frm);
      } else if (isJoinToken(frm)) {
        processJoin(qb, frm);
        qbp.setJoinExpr(frm);
      } else if (frm.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) {
        queryProperties.setHasPTF(true);
        processPTF(qb, frm);
      }
      break;

    case HiveParser.TOK_CLUSTERBY:
      // Get the clusterby aliases - these are aliased to the entries in the
      // select list
      queryProperties.setHasClusterBy(true);
      qbp.setClusterByExprForClause(ctx_1.dest, ast);
      break;

    case HiveParser.TOK_DISTRIBUTEBY:
      // Get the distribute by aliases - these are aliased to the entries in
      // the
      // select list
      queryProperties.setHasDistributeBy(true);
      qbp.setDistributeByExprForClause(ctx_1.dest, ast);
      // DISTRIBUTE BY conflicts with CLUSTER BY and ORDER BY on the same
      // destination clause.
      if (qbp.getClusterByForClause(ctx_1.dest) != null) {
        throw new SemanticException(generateErrorMessage(ast,
            ErrorMsg.CLUSTERBY_DISTRIBUTEBY_CONFLICT.getMsg()));
      } else if (qbp.getOrderByForClause(ctx_1.dest) != null) {
        throw new SemanticException(generateErrorMessage(ast,
            ErrorMsg.ORDERBY_DISTRIBUTEBY_CONFLICT.getMsg()));
      }
      break;

    case HiveParser.TOK_SORTBY:
      // Get the sort by aliases - these are aliased to the entries in the
      // select list
      queryProperties.setHasSortBy(true);
      qbp.setSortByExprForClause(ctx_1.dest, ast);
      // SORT BY conflicts with CLUSTER BY and ORDER BY on the same clause.
      if (qbp.getClusterByForClause(ctx_1.dest) != null) {
        throw new SemanticException(generateErrorMessage(ast,
            ErrorMsg.CLUSTERBY_SORTBY_CONFLICT.getMsg()));
      } else if (qbp.getOrderByForClause(ctx_1.dest) != null) {
        throw new SemanticException(generateErrorMessage(ast,
            ErrorMsg.ORDERBY_SORTBY_CONFLICT.getMsg()));
      }
      break;

    case HiveParser.TOK_ORDERBY:
      // Get the order by aliases - these are aliased to the entries in the
      // select list
      queryProperties.setHasOrderBy(true);
      qbp.setOrderByExprForClause(ctx_1.dest, ast);
      if (qbp.getClusterByForClause(ctx_1.dest) != null) {
        throw new SemanticException(generateErrorMessage(ast,
            ErrorMsg.CLUSTERBY_ORDERBY_CONFLICT.getMsg()));
      }
      break;

    case HiveParser.TOK_GROUPBY:
    case HiveParser.TOK_ROLLUP_GROUPBY:
    case HiveParser.TOK_CUBE_GROUPBY:
    case HiveParser.TOK_GROUPING_SETS:
      // Get the groupby aliases - these are aliased to the entries in the
      // select list
      queryProperties.setHasGroupBy(true);
      if (qbp.getJoinExpr() != null) {
        queryProperties.setHasJoinFollowedByGroupBy(true);
      }
      // SELECT DISTINCT may not be combined with GROUP BY.
      if (qbp.getSelForClause(ctx_1.dest).getToken().getType() == HiveParser.TOK_SELECTDI) {
        throw new SemanticException(generateErrorMessage(ast,
            ErrorMsg.SELECT_DISTINCT_WITH_GROUPBY.getMsg()));
      }
      qbp.setGroupByExprForClause(ctx_1.dest, ast);
      skipRecursion = true;

      // Rollup and Cubes are syntactic sugar on top of grouping sets
      if (ast.getToken().getType() == HiveParser.TOK_ROLLUP_GROUPBY) {
        qbp.getDestRollups().add(ctx_1.dest);
      } else if (ast.getToken().getType() == HiveParser.TOK_CUBE_GROUPBY) {
        qbp.getDestCubes().add(ctx_1.dest);
      } else if (ast.getToken().getType() == HiveParser.TOK_GROUPING_SETS) {
        qbp.getDestGroupingSets().add(ctx_1.dest);
      }
      break;

    case HiveParser.TOK_HAVING:
      qbp.setHavingExprForClause(ctx_1.dest, ast);
      // HAVING may contain aggregates not present in the select list.
      qbp.addAggregationExprsForClause(ctx_1.dest,
          doPhase1GetAggregationsFromSelect(ast, qb, ctx_1.dest));
      break;

    case HiveParser.KW_WINDOW:
      if (!qb.hasWindowingSpec(ctx_1.dest) ) {
        throw new SemanticException(generateErrorMessage(ast,
            "Query has no Cluster/Distribute By; but has a Window definition"));
      }
      handleQueryWindowClauses(qb, ctx_1, ast);
      break;

    case HiveParser.TOK_LIMIT:
      qbp.setDestLimit(ctx_1.dest, new Integer(ast.getChild(0).getText()));
      break;

    case HiveParser.TOK_ANALYZE:
      // Case of analyze command
      String table_name = getUnescapedName((ASTNode) ast.getChild(0).getChild(0));

      qb.setTabAlias(table_name, table_name);
      qb.addAlias(table_name);
      qb.getParseInfo().setIsAnalyzeCommand(true);
      qb.getParseInfo().setNoScanAnalyzeCommand(this.noscan);
      qb.getParseInfo().setPartialScanAnalyzeCommand(this.partialscan);
      // Allow analyze the whole table and dynamic partitions
      HiveConf.setVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
      HiveConf.setVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");

      break;

    case HiveParser.TOK_UNION:
      if (!qbp.getIsSubQ()) {
        // this shouldn't happen. The parser should have converted the union to be
        // contained in a subquery. Just in case, we keep the error as a fallback.
        throw new SemanticException(generateErrorMessage(ast,
            ErrorMsg.UNION_NOTIN_SUBQ.getMsg()));
      }
      skipRecursion = false;
      break;

    case HiveParser.TOK_INSERT:
      ASTNode destination = (ASTNode) ast.getChild(0);
      Tree tab = destination.getChild(0);

      // Proceed if AST contains partition & If Not Exists
      if (destination.getChildCount() == 2 &&
          tab.getChildCount() == 2 &&
          destination.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS) {
        String tableName = tab.getChild(0).getChild(0).getText();

        // Collect the static partition spec from the AST; a missing value
        // (pvalue == null) marks a dynamic partition column.
        Tree partitions = tab.getChild(1);
        int childCount = partitions.getChildCount();
        HashMap<String, String> partition = new HashMap<String, String>();
        for (int i = 0; i < childCount; i++) {
          String partitionName = partitions.getChild(i).getChild(0).getText();
          Tree pvalue = partitions.getChild(i).getChild(1);
          if (pvalue == null) {
            break;
          }
          String partitionVal = stripQuotes(pvalue.getText());
          partition.put(partitionName, partitionVal);
        }
        // if it is a dynamic partition throw the exception
        if (childCount != partition.size()) {
          throw new SemanticException(ErrorMsg.INSERT_INTO_DYNAMICPARTITION_IFNOTEXISTS
              .getMsg(partition.toString()));
        }
        Table table = null;
        try {
          table = db.getTable(tableName);
        } catch (HiveException ex) {
          throw new SemanticException(ex);
        }
        try {
          Partition parMetaData = db.getPartition(table, partition, false);
          // Check partition exists if it exists skip the overwrite
          if (parMetaData != null) {
            phase1Result = false;
            skipRecursion = true;
            LOG.info("Partition already exists so insert into overwrite " +
                "skipped for partition : " + parMetaData.toString());
            break;
          }
        } catch (HiveException e) {
          // Best effort: if the partition lookup fails we proceed with the
          // insert rather than aborting the query.
          LOG.info("Error while getting metadata : ", e);
        }
        validatePartSpec(table, partition, (ASTNode)tab, conf, false);
      }
      skipRecursion = false;
      break;

    case HiveParser.TOK_LATERAL_VIEW:
    case HiveParser.TOK_LATERAL_VIEW_OUTER:
      // todo: nested LV
      assert ast.getChildCount() == 1;
      qb.getParseInfo().getDestToLateralView().put(ctx_1.dest, ast);
      break;

    case HiveParser.TOK_CTE:
      processCTE(qb, ast);
      break;

    default:
      skipRecursion = false;
      break;
    }
  }

  if (!skipRecursion) {
    // Iterate over the rest of the children
    int child_count = ast.getChildCount();
    for (int child_pos = 0; child_pos < child_count && phase1Result; ++child_pos) {
      // Recurse
      phase1Result = phase1Result && doPhase1((ASTNode) ast.getChild(child_pos), qb, ctx_1);
    }
  }
  return phase1Result;
}
/**
 * Convenience overload of {@link #getMetaData(QBExpr, ReadEntity)} for an
 * expression that is not nested inside a view (no parent input).
 */
private void getMetaData(QBExpr qbexpr) throws SemanticException {
  getMetaData(qbexpr, null);
}
/**
 * Resolves metadata for a query-block expression. A leaf (NULLOP) wraps a
 * single query block; any other opcode is a set operation whose two operand
 * expressions are resolved recursively.
 *
 * @param qbexpr      expression to resolve
 * @param parentInput ReadEntity of the enclosing view, or null at top level
 */
private void getMetaData(QBExpr qbexpr, ReadEntity parentInput)
    throws SemanticException {
  if (qbexpr.getOpcode() != QBExpr.Opcode.NULLOP) {
    // Set operation: resolve both operands.
    getMetaData(qbexpr.getQBExpr1(), parentInput);
    getMetaData(qbexpr.getQBExpr2(), parentInput);
  } else {
    // Leaf: resolve the wrapped query block.
    getMetaData(qbexpr.getQB(), parentInput);
  }
}
/**
 * Returns the source {@link Table} read by the given table-scan operator,
 * as recorded in {@code topToTable} (null if the operator is unknown).
 */
public Table getTable(TableScanOperator ts) {
  return topToTable.get(ts);
}
/**
 * Convenience overload of {@link #getMetaData(QB, ReadEntity)} for a
 * top-level query block (no enclosing view, hence no parent input).
 */
public void getMetaData(QB qb) throws SemanticException {
  getMetaData(qb, null);
}
/**
 * Resolves metastore metadata for a query block: looks up every source
 * table, expands view and CTE references into sub-queries, recurses into
 * sub-query expressions, and resolves each destination (table, partition or
 * directory) of the query.
 *
 * @param qb          query block to resolve
 * @param parentInput ReadEntity of the enclosing view (null at top level);
 *                    used to chain input dependencies for nested views
 * @throws SemanticException on invalid tables, recursive views/CTEs,
 *                           offline tables, or unsupported destinations
 */
@SuppressWarnings("nls")
public void getMetaData(QB qb, ReadEntity parentInput) throws SemanticException {
  try {

    LOG.info("Get metadata for source tables");

    // Go over the tables and populate the related structures.
    // We have to materialize the table alias list since we might
    // modify it in the middle for view rewrite.
    List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());

    // Keep track of view alias to view name and read entity
    // For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
    // keeps track of full view name and read entity corresponding to alias V3, V3:V2, V3:V2:V1.
    // This is needed for tracking the dependencies for inputs, along with their parents.
    Map<String, ObjectPair<String, ReadEntity>> aliasToViewInfo =
        new HashMap<String, ObjectPair<String, ReadEntity>>();

    /*
     * used to capture view to SQ conversions. This is used to check for
     * recursive CTE invocations.
     */
    Map<String, String> sqAliasToCTEName = new HashMap<String, String>();

    for (String alias : tabAliases) {
      String tab_name = qb.getTabNameForAlias(alias);
      Table tab = null;
      try {
        tab = db.getTable(tab_name);
      } catch (InvalidTableException ite) {
        /*
         * if this is a CTE reference:
         * Add its AST as a SubQuery to this QB.
         */
        ASTNode cteNode = findCTEFromName(qb, tab_name.toLowerCase());
        if ( cteNode != null ) {
          String cte_name = tab_name.toLowerCase();
          // ctesExpanded holds the chain of CTEs currently being expanded;
          // finding the name again means the definition is cyclic.
          if (ctesExpanded.contains(cte_name)) {
            throw new SemanticException("Recursive cte " + tab_name +
                " detected (cycle: " + StringUtils.join(ctesExpanded, " -> ") +
                " -> " + tab_name + ").");
          }
          addCTEAsSubQuery(qb, cte_name, alias);
          sqAliasToCTEName.put(alias, cte_name);
          continue;
        }
        // Not a table and not a CTE: report an invalid table reference,
        // pointing at the source AST when one is available.
        ASTNode src = qb.getParseInfo().getSrcForAlias(alias);
        if (null != src) {
          throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(src));
        } else {
          throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(alias));
        }
      }

      // Disallow INSERT INTO on bucketized tables
      if (qb.getParseInfo().isInsertIntoTable(tab.getDbName(), tab.getTableName()) &&
          tab.getNumBuckets() > 0) {
        throw new SemanticException(ErrorMsg.INSERT_INTO_BUCKETIZED_TABLE.
            getMsg("Table: " + tab_name));
      }

      // We check offline of the table, as if people only select from an
      // non-existing partition of an offline table, the partition won't
      // be added to inputs and validate() won't have the information to
      // check the table's offline status.
      // TODO: Modify the code to remove the checking here and consolidate
      // it in validate()
      //
      if (tab.isOffline()) {
        throw new SemanticException(ErrorMsg.OFFLINE_TABLE_OR_PARTITION.
            getMsg("Table " + getUnescapedName(qb.getParseInfo().getSrcForAlias(alias))));
      }

      if (tab.isView()) {
        if (qb.getParseInfo().isAnalyzeCommand()) {
          throw new SemanticException(ErrorMsg.ANALYZE_VIEW.getMsg());
        }
        String fullViewName = tab.getDbName() + "." + tab.getTableName();
        // Prevent view cycles
        if (viewsExpanded.contains(fullViewName)) {
          throw new SemanticException("Recursive view " + fullViewName +
              " detected (cycle: " + StringUtils.join(viewsExpanded, " -> ") +
              " -> " + fullViewName + ").");
        }
        replaceViewReferenceWithDefinition(qb, tab, tab_name, alias);
        // This is the last time we'll see the Table objects for views, so add it to the inputs
        // now
        ReadEntity viewInput = new ReadEntity(tab, parentInput);
        viewInput = PlanUtils.addInput(inputs, viewInput);
        aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput));
        viewAliasToInput.put(getAliasId(alias, qb), viewInput);
        continue;
      }

      if (!InputFormat.class.isAssignableFrom(tab.getInputFormatClass())) {
        throw new SemanticException(generateErrorMessage(
            qb.getParseInfo().getSrcForAlias(alias),
            ErrorMsg.INVALID_INPUT_FORMAT_TYPE.getMsg()));
      }

      qb.getMetaData().setSrcForAlias(alias, tab);

      if (qb.getParseInfo().isAnalyzeCommand()) {
        // allow partial partition specification for nonscan since noscan is fast.
        tableSpec ts = new tableSpec(db, conf, (ASTNode) ast.getChild(0), true, this.noscan);
        if (ts.specType == SpecType.DYNAMIC_PARTITION) { // dynamic partitions
          try {
            ts.partitions = db.getPartitionsByNames(ts.tableHandle, ts.partSpec);
          } catch (HiveException e) {
            throw new SemanticException(generateErrorMessage(
                qb.getParseInfo().getSrcForAlias(alias),
                "Cannot get partitions for " + ts.partSpec), e);
          }
        }
        // validate partial scan command
        QBParseInfo qbpi = qb.getParseInfo();
        if (qbpi.isPartialScanAnalyzeCommand()) {
          Class<? extends InputFormat> inputFormatClass = null;
          switch (ts.specType) {
          case TABLE_ONLY:
          case DYNAMIC_PARTITION:
            inputFormatClass = ts.tableHandle.getInputFormatClass();
            break;
          case STATIC_PARTITION:
            inputFormatClass = ts.partHandle.getInputFormatClass();
            break;
          default:
            assert false;
          }
          // throw a HiveException for formats other than rcfile or orcfile.
          if (!(inputFormatClass.equals(RCFileInputFormat.class) || inputFormatClass
              .equals(OrcInputFormat.class))) {
            throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_NON_RCFILE.getMsg());
          }
        }

        qb.getParseInfo().addTableSpec(alias, ts);
      }
    }

    LOG.info("Get metadata for subqueries");
    // Go over the subqueries and getMetaData for these
    for (String alias : qb.getSubqAliases()) {
      boolean wasView = aliasToViewInfo.containsKey(alias);
      boolean wasCTE = sqAliasToCTEName.containsKey(alias);
      ReadEntity newParentInput = null;
      // Push the view/CTE name onto its expansion stack (for cycle
      // detection) before recursing, and pop it afterwards.
      if (wasView) {
        viewsExpanded.add(aliasToViewInfo.get(alias).getFirst());
        newParentInput = aliasToViewInfo.get(alias).getSecond();
      } else if (wasCTE) {
        ctesExpanded.add(sqAliasToCTEName.get(alias));
      }
      QBExpr qbexpr = qb.getSubqForAlias(alias);
      getMetaData(qbexpr, newParentInput);
      if (wasView) {
        viewsExpanded.remove(viewsExpanded.size() - 1);
      } else if (wasCTE) {
        ctesExpanded.remove(ctesExpanded.size() - 1);
      }
    }

    RowFormatParams rowFormatParams = new RowFormatParams();
    AnalyzeCreateCommonVars shared = new AnalyzeCreateCommonVars();
    StorageFormat storageFormat = new StorageFormat();

    LOG.info("Get metadata for destination tables");
    // Go over all the destination structures and populate the related
    // metadata
    QBParseInfo qbp = qb.getParseInfo();

    for (String name : qbp.getClauseNamesForDest()) {
      ASTNode ast = qbp.getDestForClause(name);
      switch (ast.getToken().getType()) {
      case HiveParser.TOK_TAB: {
        tableSpec ts = new tableSpec(db, conf, ast);
        if (ts.tableHandle.isView()) {
          throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
        }

        Class<?> outputFormatClass = ts.tableHandle.getOutputFormatClass();
        if (!HiveOutputFormat.class.isAssignableFrom(outputFormatClass)) {
          throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
              .getMsg(ast, "The class is " + outputFormatClass.toString()));
        }

        // tableSpec ts comes from the query (user specified),
        // which means the user didn't specify partitions in their query,
        // but whether the table itself is partitioned is not known.
        if (ts.specType != SpecType.STATIC_PARTITION) {
          // This is a table or dynamic partition
          qb.getMetaData().setDestForAlias(name, ts.tableHandle);
          // has dynamic as well as static partitions
          if (ts.partSpec != null && ts.partSpec.size() > 0) {
            qb.getMetaData().setPartSpecForAlias(name, ts.partSpec);
          }
        } else {
          // This is a partition
          qb.getMetaData().setDestForAlias(name, ts.partHandle);
        }
        if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
          // Set that variable to automatically collect stats during the MapReduce job
          qb.getParseInfo().setIsInsertToTable(true);
          // Add the table spec for the destination table.
          qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts);
        }
        break;
      }

      case HiveParser.TOK_LOCAL_DIR:
      case HiveParser.TOK_DIR: {
        // This is a dfs file
        String fname = stripQuotes(ast.getChild(0).getText());
        if ((!qb.getParseInfo().getIsSubQ())
            && (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) {

          if (qb.isCTAS()) {
            qb.setIsQuery(false);
            ctx.setResDir(null);
            ctx.setResFile(null);

            // allocate a temporary output dir on the location of the table
            String tableName = getUnescapedName((ASTNode) ast.getChild(0));
            Table newTable = db.newTable(tableName);
            Path location;
            try {
              Warehouse wh = new Warehouse(conf);
              location = wh.getDatabasePath(db.getDatabase(newTable.getDbName()));
            } catch (MetaException e) {
              throw new SemanticException(e);
            }
            try {
              fname = ctx.getExternalTmpPath(
                  FileUtils.makeQualified(location, conf).toUri()).toString();
            } catch (Exception e) {
              throw new SemanticException(generateErrorMessage(ast,
                  "Error creating temporary folder on: " + location.toString()), e);
            }
            if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
              tableSpec ts = new tableSpec(db, conf, this.ast);
              // Set that variable to automatically collect stats during the MapReduce job
              qb.getParseInfo().setIsInsertToTable(true);
              // Add the table spec for the destination table.
              qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts);
            }
          } else {
            // Plain SELECT: results go to a scratch dir which is also
            // registered as the result directory of the query.
            qb.setIsQuery(true);
            fname = ctx.getMRTmpPath().toString();
            ctx.setResDir(new Path(fname));
          }
        }
        qb.getMetaData().setDestForAlias(name, fname,
            (ast.getToken().getType() == HiveParser.TOK_DIR));

        // INSERT ... DIRECTORY may carry row-format / serde / file-format
        // children; collect them into a CreateTableDesc for the directory.
        CreateTableDesc localDirectoryDesc = new CreateTableDesc();
        boolean localDirectoryDescIsSet = false;
        int numCh = ast.getChildCount();
        for (int num = 1; num < numCh ; num++){
          ASTNode child = (ASTNode) ast.getChild(num);
          if (ast.getChild(num) != null){
            switch (child.getToken().getType()) {
            case HiveParser.TOK_TABLEROWFORMAT:
              rowFormatParams.analyzeRowFormat(shared, child);
              localDirectoryDesc.setFieldDelim(rowFormatParams.fieldDelim);
              localDirectoryDesc.setLineDelim(rowFormatParams.lineDelim);
              localDirectoryDesc.setCollItemDelim(rowFormatParams.collItemDelim);
              localDirectoryDesc.setMapKeyDelim(rowFormatParams.mapKeyDelim);
              localDirectoryDesc.setFieldEscape(rowFormatParams.fieldEscape);
              localDirectoryDesc.setNullFormat(rowFormatParams.nullFormat);
              localDirectoryDescIsSet=true;
              break;
            case HiveParser.TOK_TABLESERIALIZER:
              ASTNode serdeChild = (ASTNode) child.getChild(0);
              shared.serde = unescapeSQLString(serdeChild.getChild(0).getText());
              localDirectoryDesc.setSerName(shared.serde);
              localDirectoryDescIsSet=true;
              break;
            case HiveParser.TOK_TBLSEQUENCEFILE:
            case HiveParser.TOK_TBLTEXTFILE:
            case HiveParser.TOK_TBLRCFILE:
            case HiveParser.TOK_TBLORCFILE:
            case HiveParser.TOK_TABLEFILEFORMAT:
              storageFormat.fillStorageFormat(child, shared);
              localDirectoryDesc.setOutputFormat(storageFormat.outputFormat);
              localDirectoryDesc.setSerName(shared.serde);
              localDirectoryDescIsSet=true;
              break;
            }
          }
        }
        if (localDirectoryDescIsSet){
          qb.setLocalDirectoryDesc(localDirectoryDesc);
        }
        break;
      }
      default:
        throw new SemanticException(generateErrorMessage(ast,
            "Unknown Token Type " + ast.getToken().getType()));
      }
    }
  } catch (HiveException e) {
    // Has to use full name to make sure it does not conflict with
    // org.apache.commons.lang.StringUtils
    LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
    throw new SemanticException(e.getMessage(), e);
  }
}
/**
 * Expands a view reference in-place: re-parses the view's expanded SQL
 * text, tags every node of the resulting AST with an origin pointing at the
 * view definition (for error reporting), runs phase 1 on it, and rewrites
 * the query block so the alias refers to the resulting sub-query.
 *
 * @param qb       query block containing the view reference
 * @param tab      the view's Table object (provides the expanded text)
 * @param tab_name name under which the view was referenced
 * @param alias    alias used for the view in the query block
 * @throws SemanticException if the stored view text no longer parses
 */
private void replaceViewReferenceWithDefinition(QB qb, Table tab,
    String tab_name, String alias) throws SemanticException {

  ParseDriver pd = new ParseDriver();
  ASTNode viewTree;
  // Origin metadata lets later error messages point back at the view text.
  final ASTNodeOrigin viewOrigin = new ASTNodeOrigin("VIEW", tab.getTableName(),
      tab.getViewExpandedText(), alias, qb.getParseInfo().getSrcForAlias(
      alias));
  try {
    String viewText = tab.getViewExpandedText();
    // Re-parse the expanded view text; the final 'false' argument avoids
    // clobbering the top-level token rewrite stream.
    // NOTE(review): an older comment here claimed a null context was
    // passed, but the current context 'ctx' is used - confirm intent.
    ASTNode tree = pd.parse(viewText, ctx, false);
    tree = ParseUtils.findRootNonNullToken(tree);
    viewTree = tree;
    // Walk the whole re-parsed tree and stamp each node with the view
    // origin, so diagnostics anywhere in the expansion name the view.
    Dispatcher nodeOriginDispatcher = new Dispatcher() {
      @Override
      public Object dispatch(Node nd, java.util.Stack<Node> stack,
          Object... nodeOutputs) {
        ((ASTNode) nd).setOrigin(viewOrigin);
        return null;
      }
    };
    GraphWalker nodeOriginTagger = new DefaultGraphWalker(
        nodeOriginDispatcher);
    nodeOriginTagger.startWalking(java.util.Collections
        .<Node> singleton(viewTree), null);
  } catch (ParseException e) {
    // A user could encounter this if a stored view definition contains
    // an old SQL construct which has been eliminated in a later Hive
    // version, so we need to provide full debugging info to help
    // with fixing the view definition.
    LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
    StringBuilder sb = new StringBuilder();
    sb.append(e.getMessage());
    ErrorMsg.renderOrigin(sb, viewOrigin);
    throw new SemanticException(sb.toString(), e);
  }
  // Splice the expanded view in as a sub-query under the original alias.
  QBExpr qbexpr = new QBExpr(alias);
  doPhase1QBExpr(viewTree, qbexpr, qb.getId(), alias);
  qb.rewriteViewToSubq(alias, tab_name, qbexpr);
}
/**
 * Case-insensitive membership test: returns true iff some entry of
 * {@code list}, lower-cased, equals {@code elem} (callers supply
 * {@code elem} already lower-cased).
 *
 * @param list aliases to search
 * @param elem lower-cased alias to look for
 * @return whether {@code elem} occurs in {@code list}
 */
private boolean isPresent(String[] list, String elem) {
  for (int i = 0; i < list.length; i++) {
    if (list[i].toLowerCase().equals(elem)) {
      return true;
    }
  }
  return false;
}
/*
 * This method is invoked for unqualified column references in join conditions.
 * The Alias-to-Operator mapping built for the QueryBlock so far is passed in.
 * We try to resolve the unqualified column against each of the Operator Row Resolvers.
 * - if the column is present in only one RowResolver, we treat this as a reference to
 *   that Operator.
 * - if the column resolves with more than one RowResolver, we treat it as an Ambiguous
 *   reference.
 * - if the column doesn't resolve with any RowResolver, we treat this as an Invalid
 *   reference.
 */
@SuppressWarnings("rawtypes")
private String findAlias(ASTNode columnRef,
    Map<String, Operator> aliasToOpInfo) throws SemanticException {
  // Resolve an unqualified column against every operator's row resolver;
  // exactly one match identifies the owning alias.
  String colName = unescapeIdentifier(columnRef.getChild(0).getText()
      .toLowerCase());
  String resolvedAlias = null;
  if (aliasToOpInfo != null) {
    for (Map.Entry<String, Operator> entry : aliasToOpInfo.entrySet()) {
      RowResolver rr = opParseCtx.get(entry.getValue()).getRowResolver();
      if (rr.get(null, colName) == null) {
        // This operator does not produce the column; keep looking.
        continue;
      }
      if (resolvedAlias != null) {
        // Two different sources produce the column: ambiguous reference.
        throw new SemanticException(
            ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(columnRef.getChild(0)));
      }
      resolvedAlias = entry.getKey();
    }
  }
  if (resolvedAlias == null) {
    // No source produces the column: invalid reference.
    throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(columnRef
        .getChild(0)));
  }
  return resolvedAlias;
}
/**
 * Walks a join-condition expression tree and records which side of the join
 * each referenced alias belongs to.
 *
 * @param joinTree     join tree supplying the known left/right aliases
 * @param condn        condition sub-expression being examined
 * @param leftAliases  accumulator for aliases resolved to the left side
 * @param rightAliases accumulator for aliases resolved to the right side
 * @param fields       when non-null, collects identifier names seen under a
 *                     DOT (used for left-semijoin RHS column tracking)
 * @param aliasToOpInfo alias-to-operator map used to resolve unqualified
 *                     column references via {@link #findAlias}
 * @throws SemanticException on unresolvable references or malformed nodes
 */
@SuppressWarnings("nls")
void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn,
    ArrayList<String> leftAliases, ArrayList<String> rightAliases,
    ArrayList<String> fields,
    Map<String, Operator> aliasToOpInfo) throws SemanticException {
  // String[] allAliases = joinTree.getAllAliases();
  switch (condn.getToken().getType()) {
  case HiveParser.TOK_TABLE_OR_COL:
    String tableOrCol = unescapeIdentifier(condn.getChild(0).getText()
        .toLowerCase());
    unparseTranslator.addIdentifierTranslation((ASTNode) condn.getChild(0));
    if (isPresent(joinTree.getLeftAliases(), tableOrCol)) {
      if (!leftAliases.contains(tableOrCol)) {
        leftAliases.add(tableOrCol);
      }
    } else if (isPresent(joinTree.getRightAliases(), tableOrCol)) {
      if (!rightAliases.contains(tableOrCol)) {
        rightAliases.add(tableOrCol);
      }
    } else {
      // Not a known alias: treat it as an unqualified column and resolve
      // its owning alias against the operators seen so far.
      tableOrCol = findAlias(condn, aliasToOpInfo);
      if (isPresent(joinTree.getLeftAliases(), tableOrCol)) {
        if (!leftAliases.contains(tableOrCol)) {
          leftAliases.add(tableOrCol);
        }
      } else {
        if (!rightAliases.contains(tableOrCol)) {
          rightAliases.add(tableOrCol);
        }
      }
    }
    break;

  case HiveParser.Identifier:
    // it may be a field name, return the identifier and let the caller decide
    // whether it is or not
    if (fields != null) {
      fields
          .add(unescapeIdentifier(condn.getToken().getText().toLowerCase()));
    }
    unparseTranslator.addIdentifierTranslation(condn);
    break;

  case HiveParser.Number:
  case HiveParser.StringLiteral:
  case HiveParser.BigintLiteral:
  case HiveParser.SmallintLiteral:
  case HiveParser.TinyintLiteral:
  case HiveParser.DecimalLiteral:
  case HiveParser.TOK_STRINGLITERALSEQUENCE:
  case HiveParser.TOK_CHARSETLITERAL:
  case HiveParser.KW_TRUE:
  case HiveParser.KW_FALSE:
    // Literals reference no alias; nothing to record.
    break;

  case HiveParser.TOK_FUNCTION:
    // check all the arguments
    // (child 0 is the function name, so start from 1)
    for (int i = 1; i < condn.getChildCount(); i++) {
      parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(i),
          leftAliases, rightAliases, null, aliasToOpInfo);
    }
    break;

  default:
    // This is an operator - so check whether it is unary or binary operator
    if (condn.getChildCount() == 1) {
      parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
          leftAliases, rightAliases, null, aliasToOpInfo);
    } else if (condn.getChildCount() == 2) {

      ArrayList<String> fields1 = null;
      // if it is a dot operator, remember the field name of the rhs of the
      // left semijoin
      if (joinTree.getNoSemiJoin() == false
          && condn.getToken().getType() == HiveParser.DOT) {
        // get the semijoin rhs table name and field name
        fields1 = new ArrayList<String>();
        int rhssize = rightAliases.size();
        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
            leftAliases, rightAliases, null, aliasToOpInfo);
        String rhsAlias = null;

        // rightAliases growing means child 0 resolved to a new rhs table.
        if (rightAliases.size() > rhssize) { // the new table is rhs table
          rhsAlias = rightAliases.get(rightAliases.size() - 1);
        }

        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
            leftAliases, rightAliases, fields1, aliasToOpInfo);
        if (rhsAlias != null && fields1.size() > 0) {
          joinTree.addRHSSemijoinColumns(rhsAlias, condn);
        }
      } else {
        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
            leftAliases, rightAliases, null, aliasToOpInfo);
        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
            leftAliases, rightAliases, fields1, aliasToOpInfo);
      }
    } else {
      throw new SemanticException(condn.toStringTree() + " encountered with "
          + condn.getChildCount() + " children");
    }
    break;
  }
}
/**
 * Classifies a join condition: one side's expression may reference only one
 * side of the join. The condition is appended to the join tree's left
 * (index 0) or right (index 1) expression list accordingly, and left-side
 * source aliases are accumulated into {@code leftSrc} without duplicates.
 *
 * @throws SemanticException if the condition references both sides, or
 *         neither side, of the join
 */
private void populateAliases(List<String> leftAliases,
    List<String> rightAliases, ASTNode condn, QBJoinTree joinTree,
    List<String> leftSrc) throws SemanticException {
  if (!leftAliases.isEmpty() && !rightAliases.isEmpty()) {
    // Mixing both sides in a single key expression is invalid.
    throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
        .getMsg(condn));
  }

  if (!rightAliases.isEmpty()) {
    assert rightAliases.size() == 1;
    joinTree.getExpressions().get(1).add(condn);
  } else if (!leftAliases.isEmpty()) {
    joinTree.getExpressions().get(0).add(condn);
    // Track which left-side sources feed join keys, without duplicates.
    for (String alias : leftAliases) {
      if (!leftSrc.contains(alias)) {
        leftSrc.add(alias);
      }
    }
  } else {
    // The expression references no join source at all.
    throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_2
        .getMsg(condn));
  }
}
/*
* refactored out of the Equality case of parseJoinCondition
* so that this can be recursively called on its left tree in the case when
* only left sources are referenced in a Predicate
*/
  /**
   * Applies an equality predicate (a = b or a <=> b) to the join tree:
   * decides whether it is a true join condition or a per-side filter, and
   * whether a left-side-only predicate can be pushed down to a child join.
   *
   * leftCondAl1/leftCondAl2 hold the left-input/right-input aliases
   * referenced by the predicate's left operand; rightCondAl1/rightCondAl2
   * hold the same for the right operand (as populated by
   * parseJoinCondPopulateAlias).
   */
  void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree,
      JoinType type,
      List<String> leftSrc,
      ASTNode joinCond,
      ASTNode leftCondn,
      ASTNode rightCondn,
      List<String> leftCondAl1,
      List<String> leftCondAl2,
      List<String> rightCondAl1,
      List<String> rightCondAl2) throws SemanticException {
    if (leftCondAl1.size() != 0) {
      // Left operand references left-input sources.
      if ((rightCondAl1.size() != 0)
          || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
        // Right operand also references only the left input (or is a
        // constant): this is a filter on the left input, not a join key.
        if (type.equals(JoinType.LEFTOUTER) ||
            type.equals(JoinType.FULLOUTER)) {
          if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
            joinTree.getFilters().get(0).add(joinCond);
          } else {
            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
            joinTree.getFiltersForPushing().get(0).add(joinCond);
          }
        } else {
          /*
           * If the rhs references table sources and this QBJoinTree has a leftTree;
           * hand it to the leftTree and let it recursively handle it.
           * There are 5 cases of passing a condition down:
           * 1. The leftSide && rightSide don't contain references to the leftTree's rightAlias
           *    => pass the lists down as is.
           * 2. The leftSide contains refs to the leftTree's rightAlias, the rightSide doesn't
           *    => switch the leftCondAl1 and leftConAl2 lists and pass down.
           * 3. The rightSide contains refs to the leftTree's rightAlias, the leftSide doesn't
           *    => switch the rightCondAl1 and rightConAl2 lists and pass down.
           * 4. In case both contain references to the leftTree's rightAlias
           *   => we cannot push the condition down.
           * 5. If either contain references to both left & right
           *    => we cannot push forward.
           */
          if (rightCondAl1.size() != 0) {
            QBJoinTree leftTree = joinTree.getJoinSrc();
            List<String> leftTreeLeftSrc = new ArrayList<String>();
            if (leftTree != null) {
              String leftTreeRightSource = leftTree.getRightAliases() != null &&
                  leftTree.getRightAliases().length > 0 ?
                  leftTree.getRightAliases()[0] : null;
              // Does either operand mention the child join's right alias?
              boolean leftHasRightReference = false;
              for (String r : leftCondAl1) {
                if (r.equals(leftTreeRightSource)) {
                  leftHasRightReference = true;
                  break;
                }
              }
              boolean rightHasRightReference = false;
              for (String r : rightCondAl1) {
                if (r.equals(leftTreeRightSource)) {
                  rightHasRightReference = true;
                  break;
                }
              }
              boolean pushedDown = false;
              if ( !leftHasRightReference && !rightHasRightReference ) {
                // Case 1: pass the alias lists down unchanged.
                applyEqualityPredicateToQBJoinTree(leftTree, type, leftTreeLeftSrc,
                    joinCond, leftCondn, rightCondn,
                    leftCondAl1, leftCondAl2,
                    rightCondAl1, rightCondAl2);
                pushedDown = true;
              } else if ( !leftHasRightReference && rightHasRightReference && rightCondAl1.size() == 1 ) {
                // Case 3: swap the right operand's alias lists and push down.
                applyEqualityPredicateToQBJoinTree(leftTree, type, leftTreeLeftSrc,
                    joinCond, leftCondn, rightCondn,
                    leftCondAl1, leftCondAl2,
                    rightCondAl2, rightCondAl1);
                pushedDown = true;
              } else if (leftHasRightReference && !rightHasRightReference && leftCondAl1.size() == 1 ) {
                // Case 2: swap the left operand's alias lists and push down.
                applyEqualityPredicateToQBJoinTree(leftTree, type, leftTreeLeftSrc,
                    joinCond, leftCondn, rightCondn,
                    leftCondAl2, leftCondAl1,
                    rightCondAl1, rightCondAl2);
                pushedDown = true;
              }
              if (leftTreeLeftSrc.size() == 1) {
                leftTree.setLeftAlias(leftTreeLeftSrc.get(0));
              }
              if ( pushedDown) {
                return;
              }
            } // leftTree != null
          }
          // Cases 4/5 (or no child tree): keep the condition here as a
          // pushable filter on the left input.
          joinTree.getFiltersForPushing().get(0).add(joinCond);
        }
      } else if (rightCondAl2.size() != 0) {
        // Genuine join condition: left operand on left input, right operand
        // on right input. Record both sides and the null-safety flag.
        populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
            leftSrc);
        populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
            leftSrc);
        boolean nullsafe = joinCond.getToken().getType() == HiveParser.EQUAL_NS;
        joinTree.getNullSafes().add(nullsafe);
      }
    } else if (leftCondAl2.size() != 0) {
      // Left operand references right-input sources.
      if ((rightCondAl2.size() != 0)
          || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
        // Both operands on the right input (or right operand constant):
        // filter on the right input.
        if (type.equals(JoinType.RIGHTOUTER)
            || type.equals(JoinType.FULLOUTER)) {
          if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
            joinTree.getFilters().get(1).add(joinCond);
          } else {
            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
            joinTree.getFiltersForPushing().get(1).add(joinCond);
          }
        } else {
          joinTree.getFiltersForPushing().get(1).add(joinCond);
        }
      } else if (rightCondAl1.size() != 0) {
        // Join condition with operands swapped relative to the join order.
        populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
            leftSrc);
        populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
            leftSrc);
        boolean nullsafe = joinCond.getToken().getType() == HiveParser.EQUAL_NS;
        joinTree.getNullSafes().add(nullsafe);
      }
    } else if (rightCondAl1.size() != 0) {
      // Left operand is constant; right operand references the left input:
      // filter on the left input.
      if (type.equals(JoinType.LEFTOUTER)
          || type.equals(JoinType.FULLOUTER)) {
        if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
          joinTree.getFilters().get(0).add(joinCond);
        } else {
          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
          joinTree.getFiltersForPushing().get(0).add(joinCond);
        }
      } else {
        joinTree.getFiltersForPushing().get(0).add(joinCond);
      }
    } else {
      // Left operand is constant; right operand references the right input
      // (or is constant too): filter on the right input.
      if (type.equals(JoinType.RIGHTOUTER)
          || type.equals(JoinType.FULLOUTER)) {
        if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
          joinTree.getFilters().get(1).add(joinCond);
        } else {
          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
          joinTree.getFiltersForPushing().get(1).add(joinCond);
        }
      } else {
        joinTree.getFiltersForPushing().get(1).add(joinCond);
      }
    }
  }
@SuppressWarnings("rawtypes")
private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, List<String> leftSrc,
Map<String, Operator> aliasToOpInfo)
throws SemanticException {
if (joinCond == null) {
return;
}
JoinCond cond = joinTree.getJoinCond()[0];
JoinType type = cond.getJoinType();
parseJoinCondition(joinTree, joinCond, leftSrc, type, aliasToOpInfo);
List<ArrayList<ASTNode>> filters = joinTree.getFilters();
if (type == JoinType.LEFTOUTER || type == JoinType.FULLOUTER) {
joinTree.addFilterMapping(cond.getLeft(), cond.getRight(), filters.get(0).size());
}
if (type == JoinType.RIGHTOUTER || type == JoinType.FULLOUTER) {
joinTree.addFilterMapping(cond.getRight(), cond.getLeft(), filters.get(1).size());
}
}
/**
* Parse the join condition. If the condition is a join condition, throw an
* error if it is not an equality. Otherwise, break it into left and right
* expressions and store in the join tree. If the condition is a join filter,
   * add it to the filter list of the join tree. The join condition can contain
   * conditions on both the left and right trees and filters on either.
   * Currently, we only support equi-joins, so we throw an error if the
   * condition involves both subtrees and is not an equality. Also, we only
   * support AND; ORs are not supported currently as their semantics are not
   * very clear, may lead to data explosion, and there is no use case.
*
* @param joinTree
* jointree to be populated
* @param joinCond
* join condition
* @param leftSrc
* left sources
* @throws SemanticException
*/
  @SuppressWarnings("rawtypes")
  private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond,
      List<String> leftSrc, JoinType type,
      Map<String, Operator> aliasToOpInfo) throws SemanticException {
    if (joinCond == null) {
      return;
    }
    switch (joinCond.getToken().getType()) {
    case HiveParser.KW_OR:
      // ORs in join conditions are not supported.
      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_3
          .getMsg(joinCond));
    case HiveParser.KW_AND:
      // Recurse into both conjuncts.
      parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(0), leftSrc, type, aliasToOpInfo);
      parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(1), leftSrc, type, aliasToOpInfo);
      break;
    case HiveParser.EQUAL_NS:
    case HiveParser.EQUAL:
      // Equality: classify each operand by which join input(s) it references.
      ASTNode leftCondn = (ASTNode) joinCond.getChild(0);
      ArrayList<String> leftCondAl1 = new ArrayList<String>();
      ArrayList<String> leftCondAl2 = new ArrayList<String>();
      parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2,
          null, aliasToOpInfo);
      ASTNode rightCondn = (ASTNode) joinCond.getChild(1);
      ArrayList<String> rightCondAl1 = new ArrayList<String>();
      ArrayList<String> rightCondAl2 = new ArrayList<String>();
      parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1,
          rightCondAl2, null, aliasToOpInfo);
      // is it a filter or a join condition
      // if it is filter see if it can be pushed above the join
      // filter cannot be pushed if
      // * join is full outer or
      // * join is left outer and filter is on left alias or
      // * join is right outer and filter is on right alias
      if (((leftCondAl1.size() != 0) && (leftCondAl2.size() != 0))
          || ((rightCondAl1.size() != 0) && (rightCondAl2.size() != 0))) {
        // A single operand referencing both inputs is not an equi-join key.
        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
            .getMsg(joinCond));
      }
      applyEqualityPredicateToQBJoinTree(joinTree, type, leftSrc,
          joinCond, leftCondn, rightCondn,
          leftCondAl1, leftCondAl2,
          rightCondAl1, rightCondAl2);
      break;
    default:
      // Any other expression (function call, inequality, ...) is a filter:
      // work out which single input it references and file it accordingly.
      boolean isFunction = (joinCond.getType() == HiveParser.TOK_FUNCTION);
      // Create all children
      int childrenBegin = (isFunction ? 1 : 0);
      ArrayList<ArrayList<String>> leftAlias = new ArrayList<ArrayList<String>>(
          joinCond.getChildCount() - childrenBegin);
      ArrayList<ArrayList<String>> rightAlias = new ArrayList<ArrayList<String>>(
          joinCond.getChildCount() - childrenBegin);
      for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) {
        ArrayList<String> left = new ArrayList<String>();
        ArrayList<String> right = new ArrayList<String>();
        leftAlias.add(left);
        rightAlias.add(right);
      }
      for (int ci = childrenBegin; ci < joinCond.getChildCount(); ci++) {
        parseJoinCondPopulateAlias(joinTree, (ASTNode) joinCond.getChild(ci),
            leftAlias.get(ci - childrenBegin), rightAlias.get(ci
            - childrenBegin), null, aliasToOpInfo);
      }
      // Determine whether any child referenced the left/right input.
      boolean leftAliasNull = true;
      for (ArrayList<String> left : leftAlias) {
        if (left.size() != 0) {
          leftAliasNull = false;
          break;
        }
      }
      boolean rightAliasNull = true;
      for (ArrayList<String> right : rightAlias) {
        if (right.size() != 0) {
          rightAliasNull = false;
          break;
        }
      }
      if (!leftAliasNull && !rightAliasNull) {
        // A non-equality condition may not span both inputs.
        throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
            .getMsg(joinCond));
      }
      if (!leftAliasNull) {
        // Filter on the left input.
        if (type.equals(JoinType.LEFTOUTER)
            || type.equals(JoinType.FULLOUTER)) {
          if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
            joinTree.getFilters().get(0).add(joinCond);
          } else {
            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
            joinTree.getFiltersForPushing().get(0).add(joinCond);
          }
        } else {
          joinTree.getFiltersForPushing().get(0).add(joinCond);
        }
      } else {
        // Filter on the right input (or a constant-only condition).
        if (type.equals(JoinType.RIGHTOUTER)
            || type.equals(JoinType.FULLOUTER)) {
          if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
            joinTree.getFilters().get(1).add(joinCond);
          } else {
            LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
            joinTree.getFiltersForPushing().get(1).add(joinCond);
          }
        } else {
          joinTree.getFiltersForPushing().get(1).add(joinCond);
        }
      }
      break;
    }
  }
  /**
   * Scans a WHERE-clause predicate for equality conditions that are really
   * join conditions (one side referencing the left join input, the other the
   * right) and applies them to the join tree. Conditions that are not join
   * conditions are left in place for predicate pushdown to handle.
   */
  @SuppressWarnings("rawtypes")
  private void extractJoinCondsFromWhereClause(QBJoinTree joinTree, QB qb, String dest, ASTNode predicate,
      Map<String, Operator> aliasToOpInfo) throws SemanticException {
    switch (predicate.getType()) {
    case HiveParser.KW_AND:
      // Recurse into both conjuncts.
      extractJoinCondsFromWhereClause(joinTree, qb, dest,
          (ASTNode) predicate.getChild(0), aliasToOpInfo);
      extractJoinCondsFromWhereClause(joinTree, qb, dest,
          (ASTNode) predicate.getChild(1), aliasToOpInfo);
      break;
    case HiveParser.EQUAL_NS:
    case HiveParser.EQUAL:
      // Classify each operand by which join input(s) it references.
      ASTNode leftCondn = (ASTNode) predicate.getChild(0);
      ArrayList<String> leftCondAl1 = new ArrayList<String>();
      ArrayList<String> leftCondAl2 = new ArrayList<String>();
      try {
        parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2,
            null, aliasToOpInfo);
      } catch(SemanticException se) {
        // suppress here; if it is a real issue will get caught in where clause handling.
        return;
      }
      ASTNode rightCondn = (ASTNode) predicate.getChild(1);
      ArrayList<String> rightCondAl1 = new ArrayList<String>();
      ArrayList<String> rightCondAl2 = new ArrayList<String>();
      try {
        parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1,
            rightCondAl2, null, aliasToOpInfo);
      } catch(SemanticException se) {
        // suppress here; if it is a real issue will get caught in where clause handling.
        return;
      }
      if (((leftCondAl1.size() != 0) && (leftCondAl2.size() != 0))
          || ((rightCondAl1.size() != 0) && (rightCondAl2.size() != 0))) {
        // this is not a join condition.
        return;
      }
      if (((leftCondAl1.size() == 0) && (leftCondAl2.size() == 0))
          || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
        // this is not a join condition. Will get handled by predicate pushdown.
        return;
      }
      // Both operands reference exactly one (different) join input: treat it
      // as an equality join condition.
      List<String> leftSrc = new ArrayList<String>();
      JoinCond cond = joinTree.getJoinCond()[0];
      JoinType type = cond.getJoinType();
      applyEqualityPredicateToQBJoinTree(joinTree, type, leftSrc,
          predicate, leftCondn, rightCondn,
          leftCondAl1, leftCondAl2,
          rightCondAl1, rightCondAl2);
      if (leftSrc.size() == 1) {
        joinTree.setLeftAlias(leftSrc.get(0));
      }
      // todo: hold onto this predicate, so that we don't add it to the Filter Operator.
      break;
    default:
      return;
    }
  }
@SuppressWarnings("nls")
public <T extends OperatorDesc> Operator<T> putOpInsertMap(Operator<T> op,
RowResolver rr) {
OpParseContext ctx = new OpParseContext(rr);
opParseCtx.put(op, ctx);
op.augmentPlan();
return op;
}
  /**
   * Generates the plan for a HAVING clause: exposes select-list column
   * aliases in the input's row resolver so the HAVING expression can refer
   * to them, then builds a filter over the grouped input (SubQuery
   * predicates inside HAVING are handled by genFilterPlan).
   */
  @SuppressWarnings("nls")
  private Operator genHavingPlan(String dest, QB qb, Operator input,
      Map<String, Operator> aliasToOpInfo)
      throws SemanticException {
    ASTNode havingExpr = qb.getParseInfo().getHavingForClause(dest);
    OpParseContext inputCtx = opParseCtx.get(input);
    RowResolver inputRR = inputCtx.getRowResolver();
    Map<ASTNode, String> exprToColumnAlias = qb.getParseInfo().getAllExprToColumnAlias();
    // Map each aliased select expression into the row resolver under its alias.
    for (ASTNode astNode : exprToColumnAlias.keySet()) {
      if (inputRR.getExpression(astNode) != null) {
        inputRR.put("", exprToColumnAlias.get(astNode), inputRR.getExpression(astNode));
      }
    }
    ASTNode condn = (ASTNode) havingExpr.getChild(0);
    /*
     * Now a having clause can contain a SubQuery predicate;
     * so we invoke genFilterPlan to handle SubQuery algebraic transformation,
     * just as is done for SubQuery predicates appearing in the Where Clause.
     */
    Operator output = genFilterPlan(condn, qb, input, aliasToOpInfo, true);
    output = putOpInsertMap(output, inputRR);
    return output;
  }
  /**
   * Runs semantic analysis on a SubQuery predicate's AST (phase 1 plus
   * metadata resolution) and generates its operator plan.
   */
  private Operator genPlanForSubQueryPredicate(
      QB qbSQ,
      ISubQueryJoinInfo subQueryPredicate) throws SemanticException {
    qbSQ.setSubQueryDef(subQueryPredicate.getSubQuery());
    Phase1Ctx ctx_1 = initPhase1Ctx();
    doPhase1(subQueryPredicate.getSubQueryAST(), qbSQ, ctx_1);
    getMetaData(qbSQ);
    Operator op = genPlan(qbSQ);
    return op;
  }
  /**
   * Generates a filter plan for a WHERE or HAVING condition. If the search
   * condition contains a SubQuery expression, the SubQuery is rewritten into
   * a join with the outer query before the remaining condition is compiled
   * into a Filter operator.
   */
  @SuppressWarnings("nls")
  private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
      Map<String, Operator> aliasToOpInfo,
      boolean forHavingClause)
      throws SemanticException {
    OpParseContext inputCtx = opParseCtx.get(input);
    RowResolver inputRR = inputCtx.getRowResolver();
    /*
     * Handling of SubQuery Expressions:
     * if "Where clause contains no SubQuery expressions" then
     * -->[true] ===CONTINUE_FILTER_PROCESSING===
     * else
     * -->[false] "extract SubQuery expressions\n from Where clause"
     * if "this is a nested SubQuery or \nthere are more than 1 SubQuery expressions" then
     * -->[yes] "throw Unsupported Error"
     * else
     * --> "Rewrite Search condition to \nremove SubQuery predicate"
     * --> "build QBSubQuery"
     * --> "extract correlated predicates \nfrom Where Clause"
     * --> "add correlated Items to \nSelect List and Group By"
     * --> "construct Join Predicate \nfrom correlation predicates"
     * --> "Generate Plan for\n modified SubQuery"
     * --> "Build the Join Condition\n for Parent Query to SubQuery join"
     * --> "Build the QBJoinTree from the Join condition"
     * --> "Update Parent Query Filter\n with any Post Join conditions"
     * --> ===CONTINUE_FILTER_PROCESSING===
     * endif
     * endif
     *
     * Support for Sub Queries in Having Clause:
     * - By and large this works the same way as SubQueries in the Where Clause.
     * - The one addendum is the handling of aggregation expressions from the Outer Query
     *   appearing in correlation clauses.
     * - So such correlating predicates are allowed:
     *   min(OuterQuery.x) = SubQuery.y
     * - this requires special handling when converting to joins. See QBSubQuery.rewrite
     *   method for detailed comments.
     */
    List<ASTNode> subQueriesInOriginalTree = SubQueryUtils.findSubQueries(searchCond);
    if ( subQueriesInOriginalTree.size() > 0 ) {
      /*
       * Restriction.9.m :: disallow nested SubQuery expressions.
       */
      if (qb.getSubQueryPredicateDef() != null ) {
        throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
            subQueriesInOriginalTree.get(0), "Nested SubQuery expressions are not supported."));
      }
      /*
       * Restriction.8.m :: We allow only 1 SubQuery expression per Query.
       */
      if (subQueriesInOriginalTree.size() > 1 ) {
        throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
            subQueriesInOriginalTree.get(1), "Only 1 SubQuery expression is supported."));
      }
      /*
       * Clone the Search AST; apply all rewrites on the clone.
       */
      ASTNode clonedSearchCond = (ASTNode) SubQueryUtils.adaptor.dupTree(searchCond);
      List<ASTNode> subQueries = SubQueryUtils.findSubQueries(clonedSearchCond);
      for(int i=0; i < subQueries.size(); i++) {
        ASTNode subQueryAST = subQueries.get(i);
        ASTNode originalSubQueryAST = subQueriesInOriginalTree.get(i);
        int sqIdx = qb.incrNumSubQueryPredicates();
        // Remove the SubQuery predicate from the (cloned) search condition.
        clonedSearchCond = SubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST);
        QBSubQuery subQuery = SubQueryUtils.buildSubQuery(qb.getId(),
            sqIdx, subQueryAST, originalSubQueryAST, ctx);
        if ( !forHavingClause ) {
          qb.setWhereClauseSubQueryPredicate(subQuery);
        } else {
          qb.setHavingClauseSubQueryPredicate(subQuery);
        }
        String havingInputAlias = null;
        if ( forHavingClause ) {
          // For HAVING, the grouped input participates under a synthetic alias.
          havingInputAlias = "gby_sq" + sqIdx;
          aliasToOpInfo.put(havingInputAlias, input);
        }
        subQuery.validateAndRewriteAST(inputRR, forHavingClause, havingInputAlias, aliasToOpInfo.keySet());
        QB qbSQ = new QB(subQuery.getOuterQueryId(), subQuery.getAlias(), true);
        Operator sqPlanTopOp = genPlanForSubQueryPredicate(qbSQ, subQuery);
        aliasToOpInfo.put(subQuery.getAlias(), sqPlanTopOp);
        RowResolver sqRR = opParseCtx.get(sqPlanTopOp).getRowResolver();
        /*
         * Check.5.h :: For In and Not In the SubQuery must implicitly or
         * explicitly only contain one select item.
         */
        if ( subQuery.getOperator().getType() != SubQueryType.EXISTS &&
            subQuery.getOperator().getType() != SubQueryType.NOT_EXISTS &&
            sqRR.getColumnInfos().size() -
               subQuery.getNumOfCorrelationExprsAddedToSQSelect() > 1 ) {
          throw new SemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(
              subQueryAST, "SubQuery can contain only 1 item in Select List."));
        }
        /*
         * If this is a Not In SubQuery Predicate then Join in the Null Check SubQuery.
         * See QBSubQuery.NotInCheck for details on why and how this is constructed.
         */
        if ( subQuery.getNotInCheck() != null ) {
          QBSubQuery.NotInCheck notInCheck = subQuery.getNotInCheck();
          notInCheck.setSQRR(sqRR);
          QB qbSQ_nic = new QB(subQuery.getOuterQueryId(), notInCheck.getAlias(), true);
          Operator sqnicPlanTopOp = genPlanForSubQueryPredicate(qbSQ_nic, notInCheck);
          aliasToOpInfo.put(notInCheck.getAlias(), sqnicPlanTopOp);
          QBJoinTree joinTree_nic = genSQJoinTree(qb, notInCheck,
              input,
              aliasToOpInfo);
          pushJoinFilters(qb, joinTree_nic, aliasToOpInfo, false);
          input = genJoinOperator(qbSQ_nic, joinTree_nic, aliasToOpInfo, input);
          inputRR = opParseCtx.get(input).getRowResolver();
          if ( forHavingClause ) {
            aliasToOpInfo.put(havingInputAlias, input);
          }
        }
        /*
         * Gen Join between outer Operator and SQ op
         */
        subQuery.buildJoinCondition(inputRR, sqRR, forHavingClause, havingInputAlias);
        QBJoinTree joinTree = genSQJoinTree(qb, subQuery,
            input,
            aliasToOpInfo);
        /*
         * push filters only for this QBJoinTree. Child QBJoinTrees have already been handled.
         */
        pushJoinFilters(qb, joinTree, aliasToOpInfo, false);
        input = genJoinOperator(qbSQ, joinTree, aliasToOpInfo, input);
        searchCond = subQuery.updateOuterQueryFilter(clonedSearchCond);
      }
    }
    // Compile whatever remains of the search condition into a Filter operator.
    return genFilterPlan(qb, searchCond, input);
  }
/**
* create a filter plan. The condition and the inputs are specified.
*
* @param qb
* current query block
* @param condn
* The condition to be resolved
* @param input
* the input operator
*/
@SuppressWarnings("nls")
private Operator genFilterPlan(QB qb, ASTNode condn, Operator input)
throws SemanticException {
OpParseContext inputCtx = opParseCtx.get(input);
RowResolver inputRR = inputCtx.getRowResolver();
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
new FilterDesc(genExprNodeDesc(condn, inputRR), false), new RowSchema(
inputRR.getColumnInfos()), input), inputRR);
if (LOG.isDebugEnabled()) {
LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: "
+ inputRR.toString());
}
return output;
}
  /*
   * For inner joins, push an 'is not null' predicate to the join sources for
   * every non-null-safe join key.
   */
private Operator genNotNullFilterForJoinSourcePlan(QB qb, Operator input,
QBJoinTree joinTree, ExprNodeDesc[] joinKeys) throws SemanticException {
if (qb == null || joinTree == null) {
return input;
}
if (!joinTree.getNoOuterJoin()) {
return input;
}
if (joinKeys == null || joinKeys.length == 0) {
return input;
}
ExprNodeDesc filterPred = null;
List<Boolean> nullSafes = joinTree.getNullSafes();
for (int i = 0; i < joinKeys.length; i++) {
if ( nullSafes.get(i)) {
continue;
}
List<ExprNodeDesc> args = new ArrayList<ExprNodeDesc>();
args.add(joinKeys[i]);
ExprNodeDesc nextExpr = ExprNodeGenericFuncDesc.newInstance(
FunctionRegistry.getFunctionInfo("isnotnull").getGenericUDF(), args);
filterPred = filterPred == null ? nextExpr : ExprNodeDescUtils
.mergePredicates(filterPred, nextExpr);
}
if (filterPred == null) {
return input;
}
OpParseContext inputCtx = opParseCtx.get(input);
RowResolver inputRR = inputCtx.getRowResolver();
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
new FilterDesc(filterPred, false),
new RowSchema(inputRR.getColumnInfos()), input), inputRR);
if (LOG.isDebugEnabled()) {
LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: "
+ inputRR.toString());
}
return output;
}
  /**
   * Expands a column regex (e.g. "*" or "`col.*`") in a select list: every
   * column of {@code input} whose (unqualified) name matches the
   * case-insensitive regex, optionally restricted to {@code tabAlias}, is
   * appended to {@code col_list} and {@code output}. Returns the next free
   * column position; throws if no column matched.
   */
  @SuppressWarnings("nls")
  private Integer genColListRegex(String colRegex, String tabAlias,
      ASTNode sel, ArrayList<ExprNodeDesc> col_list,
      RowResolver input, Integer pos, RowResolver output, List<String> aliases, boolean subQuery)
      throws SemanticException {
    // The table alias should exist
    if (tabAlias != null && !input.hasTableAlias(tabAlias)) {
      throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(sel));
    }
    // TODO: Have to put in the support for AS clause
    Pattern regex = null;
    try {
      regex = Pattern.compile(colRegex, Pattern.CASE_INSENSITIVE);
    } catch (PatternSyntaxException e) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel, e
          .getMessage()));
    }
    StringBuilder replacementText = new StringBuilder();
    int matched = 0;
    // add empty string to the list of aliases. Some operators (ex. GroupBy) add
    // ColumnInfos for table alias "".
    if (!aliases.contains("")) {
      aliases.add("");
    }
    /*
     * track the input ColumnInfos that are added to the output.
     * if a columnInfo has multiple mappings; then add the column only once,
     * but carry the mappings forward.
     */
    Map<ColumnInfo, ColumnInfo> inputColsProcessed = new HashMap<ColumnInfo, ColumnInfo>();
    // For expr "*", aliases should be iterated in the order they are specified
    // in the query.
    for (String alias : aliases) {
      HashMap<String, ColumnInfo> fMap = input.getFieldMap(alias);
      if (fMap == null) {
        continue;
      }
      // For the tab.* case, add all the columns to the fieldList
      // from the input schema
      for (Map.Entry<String, ColumnInfo> entry : fMap.entrySet()) {
        ColumnInfo colInfo = entry.getValue();
        String name = colInfo.getInternalName();
        String[] tmp = input.reverseLookup(name);
        // Skip the colinfos which are not for this particular alias
        if (tabAlias != null && !tmp[0].equalsIgnoreCase(tabAlias)) {
          continue;
        }
        // Hidden virtual columns (e.g. internal bookkeeping) are never expanded.
        if (colInfo.getIsVirtualCol() && colInfo.isHiddenVirtualCol()) {
          continue;
        }
        // Not matching the regex?
        if (!regex.matcher(tmp[1]).matches()) {
          continue;
        }
        if (subQuery) {
          output.checkColumn(tmp[0], tmp[1]);
        }
        ColumnInfo oColInfo = inputColsProcessed.get(colInfo);
        if (oColInfo == null) {
          // First time this input column is seen: emit the column expression
          // and allocate an internal name at the current position.
          ExprNodeColumnDesc expr = new ExprNodeColumnDesc(colInfo.getType(),
              name, colInfo.getTabAlias(), colInfo.getIsVirtualCol(),
              colInfo.isSkewedCol());
          col_list.add(expr);
          oColInfo = new ColumnInfo(getColumnInternalName(pos),
              colInfo.getType(), colInfo.getTabAlias(),
              colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
          inputColsProcessed.put(colInfo, oColInfo);
        }
        output.put(tmp[0], tmp[1], oColInfo);
        pos = Integer.valueOf(pos.intValue() + 1);
        matched++;
        if (unparseTranslator.isEnabled()) {
          // Build the "alias.column, alias.column, ..." text that replaces
          // the regex in the unparsed query.
          if (replacementText.length() > 0) {
            replacementText.append(", ");
          }
          replacementText.append(HiveUtils.unparseIdentifier(tmp[0], conf));
          replacementText.append(".");
          replacementText.append(HiveUtils.unparseIdentifier(tmp[1], conf));
        }
      }
    }
    if (matched == 0) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel));
    }
    if (unparseTranslator.isEnabled()) {
      unparseTranslator.addTranslation(sel, replacementText.toString());
    }
    return pos;
  }
  /** Returns the generated internal column name for a column position (delegates to HiveConf). */
  public static String getColumnInternalName(int pos) {
    return HiveConf.getColumnInternalName(pos);
  }
private String getScriptProgName(String cmd) {
int end = cmd.indexOf(" ");
return (end == -1) ? cmd : cmd.substring(0, end);
}
private String getScriptArgs(String cmd) {
int end = cmd.indexOf(" ");
return (end == -1) ? "" : cmd.substring(end, cmd.length());
}
  /** Inverse of getColumnInternalName: extracts the position encoded in an internal column name (delegates to HiveConf). */
  private static int getPositionFromInternalName(String internalName) {
    return HiveConf.getPositionFromInternalName(internalName);
  }
  /**
   * If the script program is a downloadable resource (per
   * SessionState.canDownloadResource), registers it as a session FILE
   * resource and rewrites the command to use the resource's local file name
   * followed by the original arguments; otherwise returns the command
   * unchanged.
   */
  private String fetchFilesNotInLocalFilesystem(String cmd) {
    SessionState ss = SessionState.get();
    String progName = getScriptProgName(cmd);
    if (SessionState.canDownloadResource(progName)) {
      // Register the resource and rebuild the command around the local name.
      String filePath = ss.add_resource(ResourceType.FILE, progName, true);
      Path p = new Path(filePath);
      String fileName = p.getName();
      // getScriptArgs keeps the leading space, so plain concatenation works.
      String scriptArgs = getScriptArgs(cmd);
      String finalCmd = fileName + scriptArgs;
      return finalCmd;
    }
    return cmd;
  }
private TableDesc getTableDescFromSerDe(ASTNode child, String cols,
String colTypes, boolean defaultCols) throws SemanticException {
if (child.getType() == HiveParser.TOK_SERDENAME) {
String serdeName = unescapeSQLString(child.getChild(0).getText());
Class<? extends Deserializer> serdeClass = null;
try {
serdeClass = (Class<? extends Deserializer>) Class.forName(serdeName,
true, JavaUtils.getClassLoader());
} catch (ClassNotFoundException e) {
throw new SemanticException(e);
}
TableDesc tblDesc = PlanUtils.getTableDesc(serdeClass, Integer
.toString(Utilities.tabCode), cols, colTypes, defaultCols);
// copy all the properties
if (child.getChildCount() == 2) {
ASTNode prop = (ASTNode) ((ASTNode) child.getChild(1)).getChild(0);
for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
String key = unescapeSQLString(prop.getChild(propChild).getChild(0)
.getText());
String value = unescapeSQLString(prop.getChild(propChild).getChild(1)
.getText());
tblDesc.getProperties().setProperty(key, value);
}
}
return tblDesc;
} else if (child.getType() == HiveParser.TOK_SERDEPROPS) {
TableDesc tblDesc = PlanUtils.getDefaultTableDesc(Integer
.toString(Utilities.ctrlaCode), cols, colTypes, defaultCols);
int numChildRowFormat = child.getChildCount();
for (int numC = 0; numC < numChildRowFormat; numC++) {
ASTNode rowChild = (ASTNode) child.getChild(numC);
switch (rowChild.getToken().getType()) {
case HiveParser.TOK_TABLEROWFORMATFIELD:
String fieldDelim = unescapeSQLString(rowChild.getChild(0).getText());
tblDesc.getProperties()
.setProperty(serdeConstants.FIELD_DELIM, fieldDelim);
tblDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_FORMAT,
fieldDelim);
if (rowChild.getChildCount() >= 2) {
String fieldEscape = unescapeSQLString(rowChild.getChild(1)
.getText());
tblDesc.getProperties().setProperty(serdeConstants.ESCAPE_CHAR,
fieldEscape);
}
break;
case HiveParser.TOK_TABLEROWFORMATCOLLITEMS:
tblDesc.getProperties().setProperty(serdeConstants.COLLECTION_DELIM,
unescapeSQLString(rowChild.getChild(0).getText()));
break;
case HiveParser.TOK_TABLEROWFORMATMAPKEYS:
tblDesc.getProperties().setProperty(serdeConstants.MAPKEY_DELIM,
unescapeSQLString(rowChild.getChild(0).getText()));
break;
case HiveParser.TOK_TABLEROWFORMATLINES:
String lineDelim = unescapeSQLString(rowChild.getChild(0).getText());
tblDesc.getProperties().setProperty(serdeConstants.LINE_DELIM, lineDelim);
if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
throw new SemanticException(generateErrorMessage(rowChild,
ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg()));
}
case HiveParser.TOK_TABLEROWFORMATNULL:
String nullFormat = unescapeSQLString(rowChild.getChild(0).getText());
tblDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT,
nullFormat);
break;
default:
assert false;
}
}
return tblDesc;
}
// should never come here
return null;
}
private void failIfColAliasExists(Set<String> nameSet, String name)
throws SemanticException {
if (nameSet.contains(name)) {
throw new SemanticException(ErrorMsg.COLUMN_ALIAS_ALREADY_EXISTS
.getMsg(name));
}
nameSet.add(name);
}
@SuppressWarnings("nls")
private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input)
throws SemanticException {
// If there is no "AS" clause, the output schema will be "key,value"
ArrayList<ColumnInfo> outputCols = new ArrayList<ColumnInfo>();
int inputSerDeNum = 1, inputRecordWriterNum = 2;
int outputSerDeNum = 4, outputRecordReaderNum = 5;
int outputColsNum = 6;
boolean outputColNames = false, outputColSchemas = false;
int execPos = 3;
boolean defaultOutputCols = false;
// Go over all the children
if (trfm.getChildCount() > outputColsNum) {
ASTNode outCols = (ASTNode) trfm.getChild(outputColsNum);
if (outCols.getType() == HiveParser.TOK_ALIASLIST) {
outputColNames = true;
} else if (outCols.getType() == HiveParser.TOK_TABCOLLIST) {
outputColSchemas = true;
}
}
// If column type is not specified, use a string
if (!outputColNames && !outputColSchemas) {
String intName = getColumnInternalName(0);
ColumnInfo colInfo = new ColumnInfo(intName,
TypeInfoFactory.stringTypeInfo, null, false);
colInfo.setAlias("key");
outputCols.add(colInfo);
intName = getColumnInternalName(1);
colInfo = new ColumnInfo(intName, TypeInfoFactory.stringTypeInfo, null,
false);
colInfo.setAlias("value");
outputCols.add(colInfo);
defaultOutputCols = true;
} else {
ASTNode collist = (ASTNode) trfm.getChild(outputColsNum);
int ccount = collist.getChildCount();
Set<String> colAliasNamesDuplicateCheck = new HashSet<String>();
if (outputColNames) {
for (int i = 0; i < ccount; ++i) {
String colAlias = unescapeIdentifier(((ASTNode) collist.getChild(i))
.getText());
failIfColAliasExists(colAliasNamesDuplicateCheck, colAlias);
String intName = getColumnInternalName(i);
ColumnInfo colInfo = new ColumnInfo(intName,
TypeInfoFactory.stringTypeInfo, null, false);
colInfo.setAlias(colAlias);
outputCols.add(colInfo);
}
} else {
for (int i = 0; i < ccount; ++i) {
ASTNode child = (ASTNode) collist.getChild(i);
assert child.getType() == HiveParser.TOK_TABCOL;
String colAlias = unescapeIdentifier(((ASTNode) child.getChild(0))
.getText());
failIfColAliasExists(colAliasNamesDuplicateCheck, colAlias);
String intName = getColumnInternalName(i);
ColumnInfo colInfo = new ColumnInfo(intName, TypeInfoUtils
.getTypeInfoFromTypeString(getTypeStringFromAST((ASTNode) child
.getChild(1))), null, false);
colInfo.setAlias(colAlias);
outputCols.add(colInfo);
}
}
}
RowResolver out_rwsch = new RowResolver();
StringBuilder columns = new StringBuilder();
StringBuilder columnTypes = new StringBuilder();
for (int i = 0; i < outputCols.size(); ++i) {
if (i != 0) {
columns.append(",");
columnTypes.append(",");
}
columns.append(outputCols.get(i).getInternalName());
columnTypes.append(outputCols.get(i).getType().getTypeName());
out_rwsch.put(qb.getParseInfo().getAlias(), outputCols.get(i).getAlias(),
outputCols.get(i));
}
StringBuilder inpColumns = new StringBuilder();
StringBuilder inpColumnTypes = new StringBuilder();
ArrayList<ColumnInfo> inputSchema = opParseCtx.get(input).getRowResolver()
.getColumnInfos();
for (int i = 0; i < inputSchema.size(); ++i) {
if (i != 0) {
inpColumns.append(",");
inpColumnTypes.append(",");
}
inpColumns.append(inputSchema.get(i).getInternalName());
inpColumnTypes.append(inputSchema.get(i).getType().getTypeName());
}
TableDesc outInfo;
TableDesc errInfo;
TableDesc inInfo;
String defaultSerdeName = conf.getVar(HiveConf.ConfVars.HIVESCRIPTSERDE);
Class<? extends Deserializer> serde;
try {
serde = (Class<? extends Deserializer>) Class.forName(defaultSerdeName,
true, JavaUtils.getClassLoader());
} catch (ClassNotFoundException e) {
throw new SemanticException(e);
}
int fieldSeparator = Utilities.tabCode;
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESCRIPTESCAPE)) {
fieldSeparator = Utilities.ctrlaCode;
}
// Input and Output Serdes
if (trfm.getChild(inputSerDeNum).getChildCount() > 0) {
inInfo = getTableDescFromSerDe((ASTNode) (((ASTNode) trfm
.getChild(inputSerDeNum))).getChild(0), inpColumns.toString(),
inpColumnTypes.toString(), false);
} else {
inInfo = PlanUtils.getTableDesc(serde, Integer
.toString(fieldSeparator), inpColumns.toString(), inpColumnTypes
.toString(), false, true);
}
if (trfm.getChild(outputSerDeNum).getChildCount() > 0) {
outInfo = getTableDescFromSerDe((ASTNode) (((ASTNode) trfm
.getChild(outputSerDeNum))).getChild(0), columns.toString(),
columnTypes.toString(), false);
// This is for backward compatibility. If the user did not specify the
// output column list, we assume that there are 2 columns: key and value.
// However, if the script outputs: col1, col2, col3 seperated by TAB, the
// requirement is: key is col and value is (col2 TAB col3)
} else {
outInfo = PlanUtils.getTableDesc(serde, Integer
.toString(fieldSeparator), columns.toString(), columnTypes
.toString(), defaultOutputCols);
}
// Error stream always uses the default serde with a single column
errInfo = PlanUtils.getTableDesc(serde, Integer.toString(Utilities.tabCode), "KEY");
// Output record readers
Class<? extends RecordReader> outRecordReader = getRecordReader((ASTNode) trfm
.getChild(outputRecordReaderNum));
Class<? extends RecordWriter> inRecordWriter = getRecordWriter((ASTNode) trfm
.getChild(inputRecordWriterNum));
Class<? extends RecordReader> errRecordReader = getDefaultRecordReader();
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
new ScriptDesc(
fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText())),
inInfo, inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),
new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
output.setColumnExprMap(new HashMap<String, ExprNodeDesc>()); // disable backtracking
return output;
}
/**
 * Resolves the RecordReader class for a TRANSFORM clause: the class named in
 * the AST node when one was given, otherwise the configured default reader.
 */
private Class<? extends RecordReader> getRecordReader(ASTNode node)
    throws SemanticException {
  String readerClassName = node.getChildCount() == 0
      ? conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDREADER)
      : unescapeSQLString(node.getChild(0).getText());
  try {
    return (Class<? extends RecordReader>) Class.forName(readerClassName, true,
        JavaUtils.getClassLoader());
  } catch (ClassNotFoundException e) {
    throw new SemanticException(e);
  }
}
/**
 * Resolves the default script RecordReader class (HIVESCRIPTRECORDREADER);
 * used for the error stream, which never has an explicit reader in the AST.
 */
private Class<? extends RecordReader> getDefaultRecordReader()
    throws SemanticException {
  String readerClassName = conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDREADER);
  try {
    return (Class<? extends RecordReader>) Class.forName(readerClassName, true,
        JavaUtils.getClassLoader());
  } catch (ClassNotFoundException e) {
    throw new SemanticException(e);
  }
}
/**
 * Resolves the RecordWriter class for a TRANSFORM clause: the class named in
 * the AST node when one was given, otherwise the configured default writer.
 */
private Class<? extends RecordWriter> getRecordWriter(ASTNode node)
    throws SemanticException {
  String writerClassName = node.getChildCount() == 0
      ? conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDWRITER)
      : unescapeSQLString(node.getChild(0).getText());
  try {
    return (Class<? extends RecordWriter>) Class.forName(writerClassName, true,
        JavaUtils.getClassLoader());
  } catch (ClassNotFoundException e) {
    throw new SemanticException(e);
  }
}
/**
 * Builds the grouping-set bitmaps implied by ROLLUP over {@code size} keys:
 * the size+1 growing prefixes 0, 1, 3, 7, ... ((1&lt;&lt;size)-1).
 */
private List<Integer> getGroupingSetsForRollup(int size) {
  List<Integer> bitmasks = new ArrayList<Integer>(size + 1);
  int prefix = 0;
  for (int i = 0; i <= size; i++) {
    bitmasks.add(prefix);
    // Extend the prefix by one more key.
    prefix = (prefix << 1) | 1;
  }
  return bitmasks;
}
/**
 * Builds the grouping-set bitmaps implied by CUBE over {@code size} keys:
 * every subset of the keys, i.e. the bitmasks 0 .. 2^size - 1.
 */
private List<Integer> getGroupingSetsForCube(int size) {
  int total = 1 << size;
  List<Integer> bitmasks = new ArrayList<Integer>(total);
  for (int mask = 0; mask < total; mask++) {
    bitmasks.add(mask);
  }
  return bitmasks;
}
// Returns the group-by expressions for the given destination together with
// the grouping-set bitmaps they imply. ROLLUP and CUBE clauses are
// normalized into explicit grouping sets here.
private ObjectPair<List<ASTNode>, List<Integer>> getGroupByGroupingSetsForClause(
    QBParseInfo parseInfo, String dest) throws SemanticException {
  List<ASTNode> groupByExprs = getGroupByForClause(parseInfo, dest);
  List<Integer> groupingSets;
  if (parseInfo.getDestRollups().contains(dest)) {
    groupingSets = getGroupingSetsForRollup(groupByExprs.size());
  } else if (parseInfo.getDestCubes().contains(dest)) {
    groupingSets = getGroupingSetsForCube(groupByExprs.size());
  } else if (parseInfo.getDestGroupingSets().contains(dest)) {
    groupingSets = getGroupingSets(groupByExprs, parseInfo, dest);
  } else {
    // Plain GROUP BY: no grouping sets.
    groupingSets = new ArrayList<Integer>();
  }
  return new ObjectPair<List<ASTNode>, List<Integer>>(groupByExprs, groupingSets);
}
/**
 * Translates an explicit GROUPING SETS clause for {@code dest} into a list of
 * bitmaps, one per grouping set; bit i is set iff the i-th group-by
 * expression participates in that set.
 *
 * @param groupByExpr the group-by expressions of the clause, in key order
 * @param parseInfo parse info holding the raw group-by AST for {@code dest}
 * @param dest the destination clause being analyzed
 * @throws SemanticException if a grouping-set expression is not also a
 *         group-by expression, or if every resulting bitmap is empty
 */
private List<Integer> getGroupingSets(List<ASTNode> groupByExpr, QBParseInfo parseInfo,
    String dest) throws SemanticException {
  // Map each group-by expression (keyed by its string tree form) to its
  // position, so set members can be matched back to key indices.
  Map<String, Integer> exprPos = new HashMap<String, Integer>();
  for (int i = 0; i < groupByExpr.size(); ++i) {
    ASTNode node = groupByExpr.get(i);
    exprPos.put(node.toStringTree(), i);
  }
  ASTNode root = parseInfo.getGroupByForClause(dest);
  List<Integer> result = new ArrayList<Integer>(root == null ? 0 : root.getChildCount());
  if (root != null) {
    for (int i = 0; i < root.getChildCount(); ++i) {
      ASTNode child = (ASTNode) root.getChild(i);
      // Only TOK_GROUPING_SETS_EXPRESSION children describe sets; plain
      // group-by keys are skipped here.
      if (child.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) {
        continue;
      }
      // Fold every expression of this grouping set into one bitmap.
      int bitmap = 0;
      for (int j = 0; j < child.getChildCount(); ++j) {
        String treeAsString = child.getChild(j).toStringTree();
        Integer pos = exprPos.get(treeAsString);
        if (pos == null) {
          // Each grouping-set member must also appear in the GROUP BY list.
          throw new SemanticException(
              generateErrorMessage((ASTNode) child.getChild(j),
                  ErrorMsg.HIVE_GROUPING_SETS_EXPR_NOT_IN_GROUPBY.getErrorCodedMsg()));
        }
        bitmap = setBit(bitmap, pos);
      }
      result.add(bitmap);
    }
  }
  if (checkForNoAggr(result)) {
    throw new SemanticException(
        ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOFUNC.getMsg());
  }
  return result;
}
/**
 * Returns true iff every grouping-set bitmap is empty (no group-by key
 * participates in any set); the caller rejects such queries.
 *
 * @param bitmaps one bitmap per grouping set
 */
private boolean checkForNoAggr(List<Integer> bitmaps) {
  // Short-circuit on the first non-empty bitmap instead of AND-folding the
  // whole list.
  for (int mask : bitmaps) {
    if (mask != 0) {
      return false;
    }
  }
  return true;
}
// Returns bitmap with the bit at position bitIdx turned on.
private int setBit(int bitmap, int bitIdx) {
  int mask = 1 << bitIdx;
  return bitmap | mask;
}
/**
 * Wrapper around parseInfo.getGroupByForClause which automatically
 * translates SELECT DISTINCT a,b,c into the equivalent
 * SELECT a,b,c GROUP BY a,b,c.
 */
static List<ASTNode> getGroupByForClause(QBParseInfo parseInfo, String dest) {
  ASTNode select = parseInfo.getSelForClause(dest);
  if (select.getToken().getType() == HiveParser.TOK_SELECTDI) {
    // DISTINCT: every projected expression becomes an implicit group-by key.
    List<ASTNode> keys = new ArrayList<ASTNode>(select == null ? 0
        : select.getChildCount());
    if (select != null) {
      HashMap<String, ASTNode> windowingExprs = parseInfo.getWindowingExprsForClause(dest);
      for (int idx = 0; idx < select.getChildCount(); ++idx) {
        ASTNode selExpr = (ASTNode) select.getChild(idx);
        if (selExpr.getToken().getType() == HiveParser.TOK_HINTLIST) {
          continue;
        }
        // First child of the select expression is "table.column" (the AS
        // alias, when present, follows it).
        ASTNode key = (ASTNode) selExpr.getChild(0);
        // Expressions handled by windowing are not group-by keys.
        if (windowingExprs != null && windowingExprs.containsKey(key.toStringTree())) {
          continue;
        }
        keys.add(key);
      }
    }
    return keys;
  }
  // Regular GROUP BY clause: take its children, minus grouping-set nodes,
  // which are processed separately.
  ASTNode grpByExprs = parseInfo.getGroupByForClause(dest);
  List<ASTNode> keys = new ArrayList<ASTNode>(grpByExprs == null ? 0
      : grpByExprs.getChildCount());
  if (grpByExprs != null) {
    for (int idx = 0; idx < grpByExprs.getChildCount(); ++idx) {
      ASTNode key = (ASTNode) grpByExprs.getChild(idx);
      if (key.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) {
        keys.add(key);
      }
    }
  }
  return keys;
}
/**
 * Derives the (table alias, column alias) pair for one select expression.
 * Precedence: explicit AS alias, then a bare column name, then the last
 * component of a dotted reference, then (optionally) a name derived from the
 * function text, and finally defaultName + colNum.
 *
 * @return a two-element array: [0] table alias or null, [1] column alias
 */
private static String[] getColAlias(ASTNode selExpr, String defaultName,
    RowResolver inputRR, boolean includeFuncName, int colNum) {
  String colAlias = null;
  String tabAlias = null;
  // With a windowing expression the selexpr may carry a third child (the
  // window spec), so an explicit alias means 2 children, or 3 ending in one.
  boolean hasExplicitAlias = selExpr.getChildCount() == 2
      || (selExpr.getChildCount() == 3
          && selExpr.getChild(2).getType() == HiveParser.TOK_WINDOWSPEC);
  if (hasExplicitAlias) {
    // "xx + yy AS zz" -> zz
    colAlias = unescapeIdentifier(selExpr.getChild(1).getText());
    return new String[] {tabAlias, colAlias};
  }
  ASTNode root = (ASTNode) selExpr.getChild(0);
  if (root.getType() == HiveParser.TOK_TABLE_OR_COL) {
    // Bare column reference: reuse the column name itself.
    colAlias = BaseSemanticAnalyzer.unescapeIdentifier(root.getChild(0).getText());
    return new String[] {tabAlias, colAlias};
  }
  if (root.getType() == HiveParser.DOT) {
    ASTNode tab = (ASTNode) root.getChild(0);
    if (tab.getType() == HiveParser.TOK_TABLE_OR_COL) {
      String qualifier = unescapeIdentifier(tab.getChild(0).getText());
      // Only keep the qualifier when it is a known table alias.
      if (inputRR.hasTableAlias(qualifier)) {
        tabAlias = qualifier;
      }
    }
    // "xx.zz" and "xx.yy.zz" -> zz
    ASTNode col = (ASTNode) root.getChild(1);
    if (col.getType() == HiveParser.Identifier) {
      colAlias = unescapeIdentifier(col.getText());
    }
  }
  if (includeFuncName && (root.getType() == HiveParser.TOK_FUNCTION)) {
    // Build an alias from the function call text: strip TOK_* tokens,
    // collapse non-word characters into single underscores, cap the length,
    // then suffix the column number so the result stays unique.
    String flattened = root.toStringTree();
    String withoutToks = flattened.replaceAll("TOK_\\S+", "");
    String formatted = withoutToks.replaceAll("\\W", " ").trim().replaceAll("\\s+", "_");
    if (formatted.length() > AUTOGEN_COLALIAS_PRFX_MAXLENGTH) {
      formatted = formatted.substring(0, AUTOGEN_COLALIAS_PRFX_MAXLENGTH);
    }
    colAlias = formatted.concat("_" + colNum);
  }
  if (colAlias == null) {
    // Not a simple xx.yy.zz reference: fall back to the generated name.
    colAlias = defaultName + colNum;
  }
  return new String[] {tabAlias, colAlias};
}
/**
 * Returns whether the pattern is a regex expression (instead of a normal
 * string). A normal string consists solely of letters, digits and "_".
 */
private static boolean isRegex(String pattern, HiveConf conf) {
  // When quoted-identifier support is "column", arbitrary characters may
  // appear in plain column names, so nothing is treated as a regex.
  String qIdSupport = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
  if ("column".equals(qIdSupport)) {
    return false;
  }
  for (char c : pattern.toCharArray()) {
    if (!Character.isLetterOrDigit(c) && c != '_') {
      return true;
    }
  }
  return false;
}
/**
 * Convenience overload: generates the select plan for the select-expression
 * list of the given destination clause.
 */
private Operator<?> genSelectPlan(String dest, QB qb, Operator<?> input)
    throws SemanticException {
  ASTNode selExprList = qb.getParseInfo().getSelForClause(dest);
  Operator<?> selectOp = genSelectPlan(selExprList, qb, input, false);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Select Plan for clause: " + dest);
  }
  return selectOp;
}
/**
 * Generates the Select (projection) operator for one select-expression list
 * and, when the clause is a SELECT TRANSFORM or a UDTF invocation, wraps it
 * with the corresponding script / UDTF plan.
 *
 * @param selExprList the select-expression list node of the clause
 * @param qb the query block being compiled
 * @param input the operator feeding rows into this select
 * @param outerLV whether this select belongs to an outer lateral view
 * @return the topmost operator of the generated plan fragment
 * @throws SemanticException on invalid aliases, expressions or UDTF usage
 */
@SuppressWarnings("nls")
private Operator<?> genSelectPlan(ASTNode selExprList, QB qb,
    Operator<?> input, boolean outerLV) throws SemanticException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("tree: " + selExprList.toStringTree());
  }
  ArrayList<ExprNodeDesc> col_list = new ArrayList<ExprNodeDesc>();
  RowResolver out_rwsch = new RowResolver();
  ASTNode trfm = null;
  Integer pos = Integer.valueOf(0);
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  // SELECT * or SELECT TRANSFORM(*)
  boolean selectStar = false;
  int posn = 0;
  boolean hintPresent = (selExprList.getChild(0).getType() == HiveParser.TOK_HINTLIST);
  if (hintPresent) {
    posn++;
  }
  boolean subQuery = qb.getParseInfo().getIsSubQ();
  boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() ==
      HiveParser.TOK_TRANSFORM);
  if (isInTransform) {
    queryProperties.setUsesScript(true);
    globalLimitCtx.setHasTransformOrUDTF(true);
    trfm = (ASTNode) selExprList.getChild(posn).getChild(0);
  }
  // Detect queries of the form SELECT udtf(col) AS ...
  // by looking for a function as the first child, and then checking to see
  // if the function is a Generic UDTF. It's not as clean as TRANSFORM due to
  // the lack of a special token.
  boolean isUDTF = false;
  String udtfTableAlias = null;
  ArrayList<String> udtfColAliases = new ArrayList<String>();
  ASTNode udtfExpr = (ASTNode) selExprList.getChild(posn).getChild(0);
  GenericUDTF genericUDTF = null;
  int udtfExprType = udtfExpr.getType();
  if (udtfExprType == HiveParser.TOK_FUNCTION
      || udtfExprType == HiveParser.TOK_FUNCTIONSTAR) {
    String funcName = TypeCheckProcFactory.DefaultExprProcessor
        .getFunctionText(udtfExpr, true);
    FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);
    if (fi != null) {
      genericUDTF = fi.getGenericUDTF();
    }
    isUDTF = (genericUDTF != null);
    if (isUDTF) {
      globalLimitCtx.setHasTransformOrUDTF(true);
    }
    if (isUDTF && !fi.isNative()) {
      unparseTranslator.addIdentifierTranslation((ASTNode) udtfExpr
          .getChild(0));
    }
    if (isUDTF && (selectStar = udtfExprType == HiveParser.TOK_FUNCTIONSTAR)) {
      genColListRegex(".*", null, (ASTNode) udtfExpr.getChild(0),
          col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
    }
  }
  if (isUDTF) {
    // Only support a single expression when it's a UDTF
    if (selExprList.getChildCount() > 1) {
      throw new SemanticException(generateErrorMessage(
          (ASTNode) selExprList.getChild(1),
          ErrorMsg.UDTF_MULTIPLE_EXPR.getMsg()));
    }
    ASTNode selExpr = (ASTNode) selExprList.getChild(posn);
    // Get the column / table aliases from the expression. Start from 1 as
    // 0 is the TOK_FUNCTION
    // column names also can be inferred from result of UDTF
    for (int i = 1; i < selExpr.getChildCount(); i++) {
      ASTNode selExprChild = (ASTNode) selExpr.getChild(i);
      switch (selExprChild.getType()) {
      case HiveParser.Identifier:
        udtfColAliases.add(unescapeIdentifier(selExprChild.getText()));
        unparseTranslator.addIdentifierTranslation(selExprChild);
        break;
      case HiveParser.TOK_TABALIAS:
        assert (selExprChild.getChildCount() == 1);
        udtfTableAlias = unescapeIdentifier(selExprChild.getChild(0)
            .getText());
        qb.addAlias(udtfTableAlias);
        unparseTranslator.addIdentifierTranslation((ASTNode) selExprChild
            .getChild(0));
        break;
      default:
        assert (false);
      }
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("UDTF table alias is " + udtfTableAlias);
      LOG.debug("UDTF col aliases are " + udtfColAliases);
    }
  }
  // The list of expressions after SELECT or SELECT TRANSFORM.
  ASTNode exprList;
  if (isInTransform) {
    exprList = (ASTNode) trfm.getChild(0);
  } else if (isUDTF) {
    exprList = udtfExpr;
  } else {
    exprList = selExprList;
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("genSelectPlan: input = " + inputRR.toString());
  }
  // For UDTF's, skip the function name to get the expressions
  int startPosn = isUDTF ? posn + 1 : posn;
  if (isInTransform) {
    startPosn = 0;
  }
  Set<String> colAliases = new HashSet<String>();
  ASTNode[] exprs = new ASTNode[exprList.getChildCount()];
  String[][] aliases = new String[exprList.getChildCount()][];
  boolean[] hasAsClauses = new boolean[exprList.getChildCount()];
  // First pass: collect each expression and its (table, column) aliases.
  // Iterate over all expression (either after SELECT, or in SELECT TRANSFORM)
  for (int i = startPosn; i < exprList.getChildCount(); ++i) {
    // child can be EXPR AS ALIAS, or EXPR.
    ASTNode child = (ASTNode) exprList.getChild(i);
    boolean hasAsClause = (!isInTransform) && (child.getChildCount() == 2);
    boolean isWindowSpec = child.getChildCount() == 3 &&
        child.getChild(2).getType() == HiveParser.TOK_WINDOWSPEC;
    // EXPR AS (ALIAS,...) parses, but is only allowed for UDTF's
    // This check is not needed and invalid when there is a transform b/c the
    // AST's are slightly different.
    if (!isWindowSpec && !isInTransform && !isUDTF && child.getChildCount() > 2) {
      throw new SemanticException(generateErrorMessage(
          (ASTNode) child.getChild(2),
          ErrorMsg.INVALID_AS.getMsg()));
    }
    // The real expression
    ASTNode expr;
    String tabAlias;
    String colAlias;
    if (isInTransform || isUDTF) {
      tabAlias = null;
      colAlias = autogenColAliasPrfxLbl + i;
      expr = child;
    } else {
      // Get rid of TOK_SELEXPR
      expr = (ASTNode) child.getChild(0);
      String[] colRef = getColAlias(child, autogenColAliasPrfxLbl, inputRR,
          autogenColAliasPrfxIncludeFuncName, i);
      tabAlias = colRef[0];
      colAlias = colRef[1];
      if (hasAsClause) {
        unparseTranslator.addIdentifierTranslation((ASTNode) child
            .getChild(1));
      }
    }
    exprs[i] = expr;
    aliases[i] = new String[] {tabAlias, colAlias};
    hasAsClauses[i] = hasAsClause;
    colAliases.add(colAlias);
  }
  // Second pass: type-check each expression and emit its output column(s).
  // Iterate over all expression (either after SELECT, or in SELECT TRANSFORM)
  for (int i = startPosn; i < exprList.getChildCount(); ++i) {
    // The real expression
    ASTNode expr = exprs[i];
    String tabAlias = aliases[i][0];
    String colAlias = aliases[i][1];
    boolean hasAsClause = hasAsClauses[i];
    if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
      pos = genColListRegex(".*", expr.getChildCount() == 0 ? null
          : getUnescapedName((ASTNode) expr.getChild(0)).toLowerCase(),
          expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
      selectStar = true;
    } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
        && !inputRR.getIsExprResolver()
        && isRegex(unescapeIdentifier(expr.getChild(0).getText()), conf)) {
      // In case the expression is a regex COL.
      // This can only happen without AS clause
      // We don't allow this for ExprResolver - the Group By case
      pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
          null, expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
    } else if (expr.getType() == HiveParser.DOT
        && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
        && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
        .getChild(0).getText().toLowerCase())) && !hasAsClause
        && !inputRR.getIsExprResolver()
        && isRegex(unescapeIdentifier(expr.getChild(1).getText()), conf)) {
      // In case the expression is TABLE.COL (col can be regex).
      // This can only happen without AS clause
      // We don't allow this for ExprResolver - the Group By case
      pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
          unescapeIdentifier(expr.getChild(0).getChild(0).getText()
          .toLowerCase()), expr, col_list, inputRR, pos, out_rwsch,
          qb.getAliases(), subQuery);
    } else {
      // Case when this is an expression
      TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
      // We allow stateful functions in the SELECT list (but nowhere else)
      tcCtx.setAllowStatefulFunctions(true);
      tcCtx.setAllowDistinctFunctions(false);
      ExprNodeDesc exp = genExprNodeDesc(expr, inputRR, tcCtx);
      String recommended = recommendName(exp, colAlias);
      if (recommended != null && !colAliases.contains(recommended) &&
          out_rwsch.get(null, recommended) == null) {
        colAlias = recommended;
      }
      col_list.add(exp);
      if (subQuery) {
        out_rwsch.checkColumn(tabAlias, colAlias);
      }
      ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
          exp.getWritableObjectInspector(), tabAlias, false);
      colInfo.setSkewedCol((exp instanceof ExprNodeColumnDesc) ? ((ExprNodeColumnDesc) exp)
          .isSkewedCol() : false);
      out_rwsch.put(tabAlias, colAlias, colInfo);
      if ( exp instanceof ExprNodeColumnDesc ) {
        ExprNodeColumnDesc colExp = (ExprNodeColumnDesc) exp;
        String[] altMapping = inputRR.getAlternateMappings(colExp.getColumn());
        if ( altMapping != null ) {
          out_rwsch.put(altMapping[0], altMapping[1], colInfo);
        }
      }
      pos = Integer.valueOf(pos.intValue() + 1);
    }
  }
  selectStar = selectStar && exprList.getChildCount() == posn + 1;
  ArrayList<String> columnNames = new ArrayList<String>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  for (int i = 0; i < col_list.size(); i++) {
    // Replace NULL with CAST(NULL AS STRING)
    if (col_list.get(i) instanceof ExprNodeNullDesc) {
      col_list.set(i, new ExprNodeConstantDesc(
          TypeInfoFactory.stringTypeInfo, null));
    }
    String outputCol = getColumnInternalName(i);
    colExprMap.put(outputCol, col_list.get(i));
    columnNames.add(outputCol);
  }
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new SelectDesc(col_list, columnNames, selectStar), new RowSchema(
      out_rwsch.getColumnInfos()), input), out_rwsch);
  output.setColumnExprMap(colExprMap);
  if (isInTransform) {
    output = genScriptPlan(trfm, qb, output);
  }
  if (isUDTF) {
    output = genUDTFPlan(genericUDTF, udtfTableAlias, udtfColAliases, qb,
        output, outerLV);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Select Plan row schema: " + out_rwsch.toString());
  }
  return output;
}
// Suggests replacing an auto-generated column alias with the name of the
// underlying input column; returns null when no better name is available.
private String recommendName(ExprNodeDesc exp, String colAlias) {
  boolean isAutoGenerated = colAlias.startsWith(autogenColAliasPrfxLbl);
  if (!isAutoGenerated) {
    // The user supplied an alias; leave it alone.
    return null;
  }
  String candidate = ExprNodeDescUtils.recommendInputName(exp);
  boolean usable = candidate != null && !candidate.startsWith(autogenColAliasPrfxLbl);
  return usable ? candidate : null;
}
/**
 * Class to store GenericUDAF related information.
 */
static class GenericUDAFInfo {
  // Parameters after any required conversion (currently passed through
  // unchanged; see the TODO in getGenericUDAFInfo).
  ArrayList<ExprNodeDesc> convertedParameters;
  // The evaluator instance driving this aggregation.
  GenericUDAFEvaluator genericUDAFEvaluator;
  // Result type produced by the evaluator in the chosen mode.
  TypeInfo returnType;
}
/**
 * Convert an exprNodeDesc list to the list of their TypeInfos.
 */
static ArrayList<TypeInfo> getTypeInfo(ArrayList<ExprNodeDesc> exprs) {
  ArrayList<TypeInfo> types = new ArrayList<TypeInfo>(exprs.size());
  for (int i = 0; i < exprs.size(); i++) {
    types.add(exprs.get(i).getTypeInfo());
  }
  return types;
}
/**
 * Convert an exprNodeDesc list to the list of their writable
 * ObjectInspectors.
 */
static ArrayList<ObjectInspector> getWritableObjectInspector(ArrayList<ExprNodeDesc> exprs) {
  ArrayList<ObjectInspector> inspectors = new ArrayList<ObjectInspector>(exprs.size());
  for (int i = 0; i < exprs.size(); i++) {
    inspectors.add(exprs.get(i).getWritableObjectInspector());
  }
  return inspectors;
}
/**
 * Convert a TypeInfo list to the array of the corresponding standard
 * writable ObjectInspectors.
 */
static ObjectInspector[] getStandardObjectInspector(ArrayList<TypeInfo> exprs) {
  ObjectInspector[] inspectors = new ObjectInspector[exprs.size()];
  int idx = 0;
  for (TypeInfo typeInfo : exprs) {
    inspectors[idx++] = TypeInfoUtils
        .getStandardWritableObjectInspectorFromTypeInfo(typeInfo);
  }
  return inspectors;
}
/**
 * Returns the GenericUDAFEvaluator for the aggregation. This is called once
 * for each GroupBy aggregation.
 *
 * @param aggName name of the UDAF as written in the query
 * @param aggParameters expression descriptors of the actual parameters
 * @param aggTree AST node of the call, used for error reporting
 * @param isDistinct whether the call is a DISTINCT aggregation
 * @param isAllColumns whether the call is the "(*)" form
 * @throws SemanticException if no evaluator matches the parameter signature
 */
static GenericUDAFEvaluator getGenericUDAFEvaluator(String aggName,
    ArrayList<ExprNodeDesc> aggParameters, ASTNode aggTree,
    boolean isDistinct, boolean isAllColumns)
    throws SemanticException {
  ArrayList<ObjectInspector> originalParameterTypeInfos =
      getWritableObjectInspector(aggParameters);
  GenericUDAFEvaluator result = FunctionRegistry.getGenericUDAFEvaluator(
      aggName, originalParameterTypeInfos, isDistinct, isAllColumns);
  if (null == result) {
    // Message fix: a space was missing before the opening quote
    // (previously rendered as 'Evaluator"<name>"').
    String reason = "Looking for UDAF Evaluator \"" + aggName
        + "\" with parameters " + originalParameterTypeInfos;
    throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg(
        (ASTNode) aggTree.getChild(0), reason));
  }
  return result;
}
/**
 * Returns the GenericUDAFInfo struct for the aggregation.
 *
 * @param evaluator
 *          the evaluator already resolved for this aggregation
 * @param emode
 *          the evaluator mode the aggregation will run in
 * @param aggParameters
 *          The exprNodeDesc of the original parameters
 * @return GenericUDAFInfo
 * @throws SemanticException
 *           when the evaluator cannot be initialized on the parameters.
 */
static GenericUDAFInfo getGenericUDAFInfo(GenericUDAFEvaluator evaluator,
    GenericUDAFEvaluator.Mode emode, ArrayList<ExprNodeDesc> aggParameters)
    throws SemanticException {
  GenericUDAFInfo r = new GenericUDAFInfo();
  // set r.genericUDAFEvaluator
  r.genericUDAFEvaluator = evaluator;
  // set r.returnType by initializing the evaluator on the parameter OIs
  ObjectInspector returnOI = null;
  try {
    ArrayList<ObjectInspector> aggOIs = getWritableObjectInspector(aggParameters);
    // Use toArray instead of the former element-by-element manual copy.
    ObjectInspector[] aggOIArray = aggOIs.toArray(new ObjectInspector[aggOIs.size()]);
    returnOI = r.genericUDAFEvaluator.init(emode, aggOIArray);
    r.returnType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
  // set r.convertedParameters
  // TODO: type conversion
  r.convertedParameters = aggParameters;
  return r;
}
/**
 * Maps the plan-level aggregation phase (GroupByDesc.Mode) to the matching
 * GenericUDAFEvaluator.Mode. For DISTINCT aggregations the mapping shifts:
 * at PARTIALS they still consume raw rows (PARTIAL1 instead of PARTIAL2),
 * and at MERGEPARTIAL they run the full aggregation (COMPLETE instead of
 * FINAL).
 */
private static GenericUDAFEvaluator.Mode groupByDescModeToUDAFMode(
    GroupByDesc.Mode mode, boolean isDistinct) {
  switch (mode) {
  case COMPLETE:
    return GenericUDAFEvaluator.Mode.COMPLETE;
  case PARTIAL1:
    return GenericUDAFEvaluator.Mode.PARTIAL1;
  case PARTIAL2:
    return GenericUDAFEvaluator.Mode.PARTIAL2;
  case PARTIALS:
    // Distinct aggregates see original rows here; others merge partials.
    return isDistinct ? GenericUDAFEvaluator.Mode.PARTIAL1
        : GenericUDAFEvaluator.Mode.PARTIAL2;
  case FINAL:
    return GenericUDAFEvaluator.Mode.FINAL;
  case HASH:
    return GenericUDAFEvaluator.Mode.PARTIAL1;
  case MERGEPARTIAL:
    // Distinct aggregates run start-to-finish in this stage.
    return isDistinct ? GenericUDAFEvaluator.Mode.COMPLETE
        : GenericUDAFEvaluator.Mode.FINAL;
  default:
    throw new RuntimeException("internal error in groupByDescModeToUDAFMode");
  }
}
/**
 * Check if the given internalName represents a constant parameter in the
 * aggregation parameters of an aggregation tree.
 * Only invoked when map-side aggregation is not involved; every aggregation
 * parameter then already has a ColumnInfo generated alongside the
 * ReduceSinkOperator feeding the GroupByOperator. When the parameter turns
 * out to be constant, the corresponding ExprNodeDesc from reduceValues is
 * returned so the caller can keep it as a constant instead of wrapping it in
 * an ExprNodeColumnDesc (whose writableObjectInspector would no longer be a
 * ConstantObjectInspector).
 *
 * @param reduceValues value columns of the corresponding ReduceSinkOperator
 * @param internalName the internal name of this parameter
 * @return the constant ExprNodeDesc if internalName refers to one; else null
 */
private ExprNodeDesc isConstantParameterInAggregationParameters(String internalName,
    List<ExprNodeDesc> reduceValues) {
  // Only names of the form "VALUE._col<n>" can refer to a reduce value.
  String[] terms = internalName.split("\\.");
  if (terms.length != 2 || reduceValues == null) {
    return null;
  }
  if (!Utilities.ReduceField.VALUE.toString().equals(terms[0])) {
    return null;
  }
  int pos = getPositionFromInternalName(terms[1]);
  if (pos < 0 || pos >= reduceValues.size()) {
    return null;
  }
  ExprNodeDesc reduceValue = reduceValues.get(pos);
  if (reduceValue == null) {
    return null;
  }
  // Constant iff the inspector carries a compile-time value.
  return (reduceValue.getWritableObjectInspector() instanceof ConstantObjectInspector)
      ? reduceValue : null;
}
/**
 * Generate the GroupByOperator for the Query Block (parseInfo.getXXX(dest)).
 * The new GroupByOperator will be a child of the reduceSinkOperatorInfo.
 *
 * @param parseInfo parse info of the query block
 * @param dest the destination clause this group-by belongs to
 * @param input the operator this group-by is attached to
 * @param rs the ReduceSinkOperator feeding this group-by; its output key and
 *          value columns are consulted to resolve distinct and constant
 *          aggregation parameters
 * @param mode
 *          The mode of the aggregation (PARTIAL1 or COMPLETE)
 * @param genericUDAFEvaluators
 *          If not null, this function will store the mapping from Aggregation
 *          StringTree to the genericUDAFEvaluator in this parameter, so it
 *          can be used in the next-stage GroupBy aggregations.
 * @return the new GroupByOperator
 */
@SuppressWarnings("nls")
private Operator genGroupByPlanGroupByOperator(QBParseInfo parseInfo,
    String dest, Operator input, ReduceSinkOperator rs, GroupByDesc.Mode mode,
    Map<String, GenericUDAFEvaluator> genericUDAFEvaluators)
    throws SemanticException {
  RowResolver groupByInputRowResolver = opParseCtx
      .get(input).getRowResolver();
  RowResolver groupByOutputRowResolver = new RowResolver();
  groupByOutputRowResolver.setIsExprResolver(true);
  ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // Emit one key column per group-by expression.
  List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
  for (int i = 0; i < grpByExprs.size(); ++i) {
    ASTNode grpbyExpr = grpByExprs.get(i);
    ColumnInfo exprInfo = groupByInputRowResolver.getExpression(grpbyExpr);
    if (exprInfo == null) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
    }
    groupByKeys.add(new ExprNodeColumnDesc(exprInfo.getType(), exprInfo
        .getInternalName(), "", false));
    String field = getColumnInternalName(i);
    outputColumnNames.add(field);
    ColumnInfo oColInfo = new ColumnInfo(field, exprInfo.getType(), null, false);
    groupByOutputRowResolver.putExpression(grpbyExpr,
        oColInfo);
    addAlternateGByKeyMappings(grpbyExpr, oColInfo, input, groupByOutputRowResolver);
    colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
  }
  // For each aggregation
  HashMap<String, ASTNode> aggregationTrees = parseInfo
      .getAggregationExprsForClause(dest);
  assert (aggregationTrees != null);
  // get the last colName for the reduce KEY
  // it represents the column name corresponding to distinct aggr, if any
  String lastKeyColName = null;
  List<String> inputKeyCols = rs.getConf().getOutputKeyColumnNames();
  if (inputKeyCols.size() > 0) {
    lastKeyColName = inputKeyCols.get(inputKeyCols.size() - 1);
  }
  List<ExprNodeDesc> reduceValues = rs.getConf().getValueCols();
  int numDistinctUDFs = 0;
  for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
    ASTNode value = entry.getValue();
    // This is the GenericUDAF name
    String aggName = unescapeIdentifier(value.getChild(0).getText());
    boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
    boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
    // Convert children to aggParameters
    ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
    // 0 is the function name
    for (int i = 1; i < value.getChildCount(); i++) {
      ASTNode paraExpr = (ASTNode) value.getChild(i);
      ColumnInfo paraExprInfo =
          groupByInputRowResolver.getExpression(paraExpr);
      if (paraExprInfo == null) {
        throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(paraExpr));
      }
      String paraExpression = paraExprInfo.getInternalName();
      assert (paraExpression != null);
      if (isDistinct && lastKeyColName != null) {
        // if aggr is distinct, the parameter is name is constructed as
        // KEY.lastKeyColName:<tag>._colx
        paraExpression = Utilities.ReduceField.KEY.name() + "." +
            lastKeyColName + ":" + numDistinctUDFs + "." +
            getColumnInternalName(i - 1);
      }
      ExprNodeDesc expr = new ExprNodeColumnDesc(paraExprInfo.getType(),
          paraExpression, paraExprInfo.getTabAlias(),
          paraExprInfo.getIsVirtualCol());
      ExprNodeDesc reduceValue = isConstantParameterInAggregationParameters(
          paraExprInfo.getInternalName(), reduceValues);
      if (reduceValue != null) {
        // this parameter is a constant
        expr = reduceValue;
      }
      aggParameters.add(expr);
    }
    if (isDistinct) {
      numDistinctUDFs++;
    }
    Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
    GenericUDAFEvaluator genericUDAFEvaluator = getGenericUDAFEvaluator(
        aggName, aggParameters, value, isDistinct, isAllColumns);
    assert (genericUDAFEvaluator != null);
    GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
        aggParameters);
    aggregations.add(new AggregationDesc(aggName.toLowerCase(),
        udaf.genericUDAFEvaluator, udaf.convertedParameters, isDistinct,
        amode));
    // Aggregation outputs follow the group-by keys in the output schema.
    String field = getColumnInternalName(groupByKeys.size()
        + aggregations.size() - 1);
    outputColumnNames.add(field);
    groupByOutputRowResolver.putExpression(value, new ColumnInfo(
        field, udaf.returnType, "", false));
    // Save the evaluator so that it can be used by the next-stage
    // GroupByOperators
    if (genericUDAFEvaluators != null) {
      genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
    }
  }
  float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
  float memoryThreshold = HiveConf
      .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
  Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
      false, groupByMemoryUsage, memoryThreshold, null, false, 0, numDistinctUDFs > 0),
      new RowSchema(groupByOutputRowResolver.getColumnInfos()),
      input), groupByOutputRowResolver);
  op.setColumnExprMap(colExprMap);
  return op;
}
// Forwards the grouping-set id key through a subsequent group-by operator.
// For example, for "select key, value, count(1) from T group by key, value
// with rollup" under map-side aggregation without skew the plan is:
//
//   TableScan --> Select --> GroupBy1 --> ReduceSink --> GroupBy2 --> Select --> FileSink
//
// GroupBy1 introduces the grouping-set id column for the rollup; this method
// is called for GroupBy2 so it carries that column along as one of its keys.
private void addGroupingSetKey(List<ExprNodeDesc> groupByKeys,
    RowResolver groupByInputRowResolver,
    RowResolver groupByOutputRowResolver,
    List<String> outputColumnNames,
    Map<String, ExprNodeDesc> colExprMap) throws SemanticException {
  // Look up the grouping-set id column produced by the upstream operator.
  String inputColumnName = groupByInputRowResolver
      .get(null, VirtualColumn.GROUPINGID.getName()).getInternalName();
  ExprNodeDesc groupingKey = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
      inputColumnName, null, false);
  groupByKeys.add(groupingKey);
  String outputColumn = getColumnInternalName(groupByKeys.size() - 1);
  outputColumnNames.add(outputColumn);
  groupByOutputRowResolver.put(null, VirtualColumn.GROUPINGID.getName(),
      new ColumnInfo(outputColumn, TypeInfoFactory.stringTypeInfo, null, true));
  colExprMap.put(outputColumn, groupingKey);
}
// Process grouping set for the reduce sink operator
// For eg: consider: select key, value, count(1) from T group by key, value with rollup.
// Assuming map-side aggregation and no skew, the plan would look like:
//
// TableScan --> Select --> GroupBy1 --> ReduceSink --> GroupBy2 --> Select --> FileSink
//
// This function is called for ReduceSink to add the additional grouping keys introduced by
// GroupBy1 into the reduce keys.
/**
 * Adds the grouping-set id column (introduced by the map-side GroupBy) as an
 * extra reduce-sink key so it is shuffled along with the group-by keys.
 * Appends to {@code reduceKeys}, {@code outputKeyColumnNames} and
 * {@code colExprMap}, and registers the KEY-qualified output column in
 * {@code reduceSinkOutputRowResolver}.
 */
private void processGroupingSetReduceSinkOperator(RowResolver reduceSinkInputRowResolver,
    RowResolver reduceSinkOutputRowResolver,
    List<ExprNodeDesc> reduceKeys,
    List<String> outputKeyColumnNames,
    Map<String, ExprNodeDesc> colExprMap) throws SemanticException {
  String groupingIdName = VirtualColumn.GROUPINGID.getName();
  String inputColumn =
      reduceSinkInputRowResolver.get(null, groupingIdName).getInternalName();
  ExprNodeDesc groupingIdExpr = new ExprNodeColumnDesc(
      TypeInfoFactory.stringTypeInfo, inputColumn, null, false);
  reduceKeys.add(groupingIdExpr);
  int keyIndex = reduceKeys.size() - 1;
  outputKeyColumnNames.add(getColumnInternalName(keyIndex));
  // The reducer sees the column under the "KEY." prefix.
  String keyField =
      Utilities.ReduceField.KEY.toString() + "." + getColumnInternalName(keyIndex);
  ColumnInfo outInfo =
      new ColumnInfo(keyField, reduceKeys.get(keyIndex).getTypeInfo(), null, true);
  reduceSinkOutputRowResolver.put(null, groupingIdName, outInfo);
  colExprMap.put(outInfo.getInternalName(), groupingIdExpr);
}
/**
* Generate the GroupByOperator for the Query Block (parseInfo.getXXX(dest)).
* The new GroupByOperator will be a child of the reduceSinkOperatorInfo.
*
* @param mode
* The mode of the aggregation (MERGEPARTIAL, PARTIAL2)
* @param genericUDAFEvaluators
* The mapping from Aggregation StringTree to the
* genericUDAFEvaluator.
* @param distPartAggr
* partial aggregation for distincts
* @param groupingSets
* list of grouping sets
* @param groupingSetsPresent
* whether grouping sets are present in this query
* @param groupingSetsConsumedCurrentMR
* whether grouping sets are consumed by this group by
* @return the new GroupByOperator
*/
@SuppressWarnings("nls")
private Operator genGroupByPlanGroupByOperator1(QBParseInfo parseInfo,
String dest, Operator reduceSinkOperatorInfo, GroupByDesc.Mode mode,
Map<String, GenericUDAFEvaluator> genericUDAFEvaluators,
boolean distPartAgg,
List<Integer> groupingSets,
boolean groupingSetsPresent,
boolean groupingSetsNeedAdditionalMRJob) throws SemanticException {
ArrayList<String> outputColumnNames = new ArrayList<String>();
RowResolver groupByInputRowResolver = opParseCtx
.get(reduceSinkOperatorInfo).getRowResolver();
RowResolver groupByOutputRowResolver = new RowResolver();
groupByOutputRowResolver.setIsExprResolver(true);
ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
// Forward each group-by expression from the reduce-sink output as a key of
// this GroupBy; output columns are named positionally (_col0, _col1, ...).
for (int i = 0; i < grpByExprs.size(); ++i) {
ASTNode grpbyExpr = grpByExprs.get(i);
ColumnInfo exprInfo = groupByInputRowResolver.getExpression(grpbyExpr);
if (exprInfo == null) {
throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
}
groupByKeys.add(new ExprNodeColumnDesc(exprInfo.getType(), exprInfo
.getInternalName(), exprInfo.getTabAlias(), exprInfo
.getIsVirtualCol()));
String field = getColumnInternalName(i);
outputColumnNames.add(field);
ColumnInfo oColInfo = new ColumnInfo(field, exprInfo.getType(), "", false);
groupByOutputRowResolver.putExpression(grpbyExpr,
oColInfo);
addAlternateGByKeyMappings(grpbyExpr, oColInfo, reduceSinkOperatorInfo, groupByOutputRowResolver);
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
// This is only needed if a new grouping set key is being created
int groupingSetsPosition = 0;
// For grouping sets, add a dummy grouping key
if (groupingSetsPresent) {
// Consider the query: select a,b, count(1) from T group by a,b with cube;
// where it is being executed in a single map-reduce job
// The plan is TableScan -> GroupBy1 -> ReduceSink -> GroupBy2 -> FileSink
// GroupBy1 already added the grouping id as part of the row
// This function is called for GroupBy2 to add grouping id as part of the groupby keys
if (!groupingSetsNeedAdditionalMRJob) {
addGroupingSetKey(
groupByKeys,
groupByInputRowResolver,
groupByOutputRowResolver,
outputColumnNames,
colExprMap);
}
else {
groupingSetsPosition = groupByKeys.size();
// The grouping set has not yet been processed. Create a new grouping key
// Consider the query: select a,b, count(1) from T group by a,b with cube;
// where it is being executed in 2 map-reduce jobs
// The plan for 1st MR is TableScan -> GroupBy1 -> ReduceSink -> GroupBy2 -> FileSink
// GroupBy1/ReduceSink worked as if grouping sets were not present
// This function is called for GroupBy2 to create new rows for grouping sets
// For each input row (a,b), 4 rows are created for the example above:
// (a,b), (a,null), (null, b), (null, null)
createNewGroupingKey(groupByKeys,
outputColumnNames,
groupByOutputRowResolver,
colExprMap);
}
}
HashMap<String, ASTNode> aggregationTrees = parseInfo
.getAggregationExprsForClause(dest);
// get the last colName for the reduce KEY
// it represents the column name corresponding to distinct aggr, if any
String lastKeyColName = null;
List<ExprNodeDesc> reduceValues = null;
if (reduceSinkOperatorInfo.getConf() instanceof ReduceSinkDesc) {
List<String> inputKeyCols = ((ReduceSinkDesc)
reduceSinkOperatorInfo.getConf()).getOutputKeyColumnNames();
if (inputKeyCols.size() > 0) {
lastKeyColName = inputKeyCols.get(inputKeyCols.size() - 1);
}
reduceValues = ((ReduceSinkDesc) reduceSinkOperatorInfo.getConf()).getValueCols();
}
int numDistinctUDFs = 0;
boolean containsDistinctAggr = false;
// Build one AggregationDesc per aggregation tree of this destination.
for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
ASTNode value = entry.getValue();
String aggName = unescapeIdentifier(value.getChild(0).getText());
ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
boolean isDistinct = (value.getType() == HiveParser.TOK_FUNCTIONDI);
containsDistinctAggr = containsDistinctAggr || isDistinct;
// If the function is distinct, partial aggregation has not been done on
// the client side.
// If distPartAgg is set, the client is letting us know that partial
// aggregation has not been done.
// For eg: select a, count(b+c), count(distinct d+e) group by a
// For count(b+c), if partial aggregation has been performed, then we
// directly look for count(b+c).
// Otherwise, we look for b+c.
// For distincts, partial aggregation is never performed on the client
// side, so always look for the parameters: d+e
boolean partialAggDone = !(distPartAgg || isDistinct);
if (!partialAggDone) {
// 0 is the function name
for (int i = 1; i < value.getChildCount(); i++) {
ASTNode paraExpr = (ASTNode) value.getChild(i);
ColumnInfo paraExprInfo =
groupByInputRowResolver.getExpression(paraExpr);
if (paraExprInfo == null) {
throw new SemanticException(ErrorMsg.INVALID_COLUMN
.getMsg(paraExpr));
}
String paraExpression = paraExprInfo.getInternalName();
assert (paraExpression != null);
if (isDistinct && lastKeyColName != null) {
// if aggr is distinct, the parameter is name is constructed as
// KEY.lastKeyColName:<tag>._colx
paraExpression = Utilities.ReduceField.KEY.name() + "." +
lastKeyColName + ":" + numDistinctUDFs + "."
+ getColumnInternalName(i - 1);
}
ExprNodeDesc expr = new ExprNodeColumnDesc(paraExprInfo.getType(),
paraExpression, paraExprInfo.getTabAlias(),
paraExprInfo.getIsVirtualCol());
ExprNodeDesc reduceValue = isConstantParameterInAggregationParameters(
paraExprInfo.getInternalName(), reduceValues);
if (reduceValue != null) {
// this parameter is a constant
expr = reduceValue;
}
aggParameters.add(expr);
}
} else {
// Partial aggregation has been done: the single parameter is the
// partial-aggregate column of the whole aggregation expression.
ColumnInfo paraExprInfo = groupByInputRowResolver.getExpression(value);
if (paraExprInfo == null) {
throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value));
}
String paraExpression = paraExprInfo.getInternalName();
assert (paraExpression != null);
aggParameters.add(new ExprNodeColumnDesc(paraExprInfo.getType(),
paraExpression, paraExprInfo.getTabAlias(), paraExprInfo
.getIsVirtualCol()));
}
if (isDistinct) {
numDistinctUDFs++;
}
boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
GenericUDAFEvaluator genericUDAFEvaluator = null;
// For distincts, partial aggregations have not been done
if (distPartAgg) {
// First time this aggregation is evaluated: create the evaluator and
// remember it for later stages.
genericUDAFEvaluator = getGenericUDAFEvaluator(aggName, aggParameters,
value, isDistinct, isAllColumns);
assert (genericUDAFEvaluator != null);
genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
} else {
// Reuse the evaluator created by the previous stage's GroupBy.
genericUDAFEvaluator = genericUDAFEvaluators.get(entry.getKey());
assert (genericUDAFEvaluator != null);
}
GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
aggParameters);
aggregations.add(new AggregationDesc(aggName.toLowerCase(),
udaf.genericUDAFEvaluator, udaf.convertedParameters,
(mode != GroupByDesc.Mode.FINAL && isDistinct), amode));
// Aggregation outputs are named after the group-by keys positionally.
String field = getColumnInternalName(groupByKeys.size()
+ aggregations.size() - 1);
outputColumnNames.add(field);
groupByOutputRowResolver.putExpression(value, new ColumnInfo(
field, udaf.returnType, "", false));
}
float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
float memoryThreshold = HiveConf
.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
// Nothing special needs to be done for grouping sets if
// this is the final group by operator, and multiple rows corresponding to the
// grouping sets have been generated upstream.
// However, if an addition MR job has been created to handle grouping sets,
// additional rows corresponding to grouping sets need to be created here.
Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
distPartAgg, groupByMemoryUsage, memoryThreshold,
groupingSets,
groupingSetsPresent && groupingSetsNeedAdditionalMRJob,
groupingSetsPosition, containsDistinctAggr),
new RowSchema(groupByOutputRowResolver.getColumnInfos()), reduceSinkOperatorInfo),
groupByOutputRowResolver);
op.setColumnExprMap(colExprMap);
return op;
}
/*
* Create a new grouping key for grouping id.
* A dummy grouping id. is added. At runtime, the group by operator
* creates 'n' rows per input row, where 'n' is the number of grouping sets.
*/
/**
 * Adds a brand-new dummy grouping-set key (a constant placeholder) as a
 * group-by key. At runtime the GroupBy operator emits 'n' rows per input
 * row — one per grouping set — substituting the actual grouping-set value
 * for this constant.
 */
private void createNewGroupingKey(List<ExprNodeDesc> groupByKeys,
    List<String> outputColumnNames,
    RowResolver groupByOutputRowResolver,
    Map<String, ExprNodeDesc> colExprMap) {
  // The constant's value is irrelevant; it is replaced by the grouping-set
  // value in the actual implementation.
  ExprNodeConstantDesc dummyKey = new ExprNodeConstantDesc("0");
  groupByKeys.add(dummyKey);
  String outputField = getColumnInternalName(groupByKeys.size() - 1);
  outputColumnNames.add(outputField);
  groupByOutputRowResolver.put(null, VirtualColumn.GROUPINGID.getName(),
      new ColumnInfo(outputField, TypeInfoFactory.stringTypeInfo, null, true));
  colExprMap.put(outputField, dummyKey);
}
/**
* Generate the map-side GroupByOperator for the Query Block
* (qb.getParseInfo().getXXX(dest)). The new GroupByOperator will be a child
* of the inputOperatorInfo.
*
* @param mode
* The mode of the aggregation (HASH)
* @param genericUDAFEvaluators
* If not null, this function will store the mapping from Aggregation
* StringTree to the genericUDAFEvaluator in this parameter, so it
* can be used in the next-stage GroupBy aggregations.
* @return the new GroupByOperator
*/
@SuppressWarnings("nls")
private Operator genGroupByPlanMapGroupByOperator(QB qb,
String dest,
List<ASTNode> grpByExprs,
Operator inputOperatorInfo,
GroupByDesc.Mode mode,
Map<String, GenericUDAFEvaluator> genericUDAFEvaluators,
List<Integer> groupingSetKeys,
boolean groupingSetsPresent) throws SemanticException {
RowResolver groupByInputRowResolver = opParseCtx.get(inputOperatorInfo)
.getRowResolver();
QBParseInfo parseInfo = qb.getParseInfo();
RowResolver groupByOutputRowResolver = new RowResolver();
groupByOutputRowResolver.setIsExprResolver(true);
ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
ArrayList<String> outputColumnNames = new ArrayList<String>();
ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
// Generate an expression for each group-by clause item and register it as a
// key; output columns are named positionally (_col0, _col1, ...).
for (int i = 0; i < grpByExprs.size(); ++i) {
ASTNode grpbyExpr = grpByExprs.get(i);
ExprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr,
groupByInputRowResolver);
groupByKeys.add(grpByExprNode);
String field = getColumnInternalName(i);
outputColumnNames.add(field);
groupByOutputRowResolver.putExpression(grpbyExpr,
new ColumnInfo(field, grpByExprNode.getTypeInfo(), "", false));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
// The grouping set key is present after the grouping keys, before the distinct keys
int groupingSetsPosition = groupByKeys.size();
// For grouping sets, add a dummy grouping key
// This dummy key needs to be added as a reduce key
// For eg: consider: select key, value, count(1) from T group by key, value with rollup.
// Assuming map-side aggregation and no skew, the plan would look like:
//
// TableScan --> Select --> GroupBy1 --> ReduceSink --> GroupBy2 --> Select --> FileSink
//
// This function is called for GroupBy1 to create an additional grouping key
// for the grouping set (corresponding to the rollup).
if (groupingSetsPresent) {
createNewGroupingKey(groupByKeys,
outputColumnNames,
groupByOutputRowResolver,
colExprMap);
}
// If there is a distinctFuncExp, add all parameters to the reduceKeys.
if (!parseInfo.getDistinctFuncExprsForClause(dest).isEmpty()) {
List<ASTNode> list = parseInfo.getDistinctFuncExprsForClause(dest);
for (ASTNode value : list) {
// 0 is function name
for (int i = 1; i < value.getChildCount(); i++) {
ASTNode parameter = (ASTNode) value.getChild(i);
// Skip parameters already registered (e.g. also a group-by key).
if (groupByOutputRowResolver.getExpression(parameter) == null) {
ExprNodeDesc distExprNode = genExprNodeDesc(parameter,
groupByInputRowResolver);
groupByKeys.add(distExprNode);
String field = getColumnInternalName(groupByKeys.size() - 1);
outputColumnNames.add(field);
groupByOutputRowResolver.putExpression(parameter, new ColumnInfo(
field, distExprNode.getTypeInfo(), "", false));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
}
}
}
// For each aggregation
HashMap<String, ASTNode> aggregationTrees = parseInfo
.getAggregationExprsForClause(dest);
assert (aggregationTrees != null);
boolean containsDistinctAggr = false;
for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
ASTNode value = entry.getValue();
String aggName = unescapeIdentifier(value.getChild(0).getText());
ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
// 0 is the function name
for (int i = 1; i < value.getChildCount(); i++) {
ASTNode paraExpr = (ASTNode) value.getChild(i);
ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr,
groupByInputRowResolver);
aggParameters.add(paraExprNode);
}
boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
containsDistinctAggr = containsDistinctAggr || isDistinct;
boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
GenericUDAFEvaluator genericUDAFEvaluator = getGenericUDAFEvaluator(
aggName, aggParameters, value, isDistinct, isAllColumns);
assert (genericUDAFEvaluator != null);
GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
aggParameters);
aggregations.add(new AggregationDesc(aggName.toLowerCase(),
udaf.genericUDAFEvaluator, udaf.convertedParameters, isDistinct,
amode));
// Aggregation outputs are named after all keys positionally.
String field = getColumnInternalName(groupByKeys.size()
+ aggregations.size() - 1);
outputColumnNames.add(field);
if (groupByOutputRowResolver.getExpression(value) == null) {
groupByOutputRowResolver.putExpression(value, new ColumnInfo(
field, udaf.returnType, "", false));
}
// Save the evaluator so that it can be used by the next-stage
// GroupByOperators
if (genericUDAFEvaluators != null) {
genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
}
}
float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
float memoryThreshold = HiveConf
.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
false, groupByMemoryUsage, memoryThreshold,
groupingSetKeys, groupingSetsPresent, groupingSetsPosition, containsDistinctAggr),
new RowSchema(groupByOutputRowResolver.getColumnInfos()),
inputOperatorInfo), groupByOutputRowResolver);
op.setColumnExprMap(colExprMap);
return op;
}
/**
* Generate the ReduceSinkOperator for the Group By Query Block
* (qb.getPartInfo().getXXX(dest)). The new ReduceSinkOperator will be a child
* of inputOperatorInfo.
*
* It will put all Group By keys and the distinct field (if any) in the
* map-reduce sort key, and all other fields in the map-reduce value.
*
* @param numPartitionFields
* the number of fields for map-reduce partitioning. This is usually
* the number of fields in the Group By keys.
* @return the new ReduceSinkOperator.
* @throws SemanticException
*/
@SuppressWarnings("nls")
private ReduceSinkOperator genGroupByPlanReduceSinkOperator(QB qb,
String dest,
Operator inputOperatorInfo,
List<ASTNode> grpByExprs,
int numPartitionFields,
boolean changeNumPartitionFields,
int numReducers,
boolean mapAggrDone,
boolean groupingSetsPresent) throws SemanticException {
RowResolver reduceSinkInputRowResolver = opParseCtx.get(inputOperatorInfo)
.getRowResolver();
QBParseInfo parseInfo = qb.getParseInfo();
RowResolver reduceSinkOutputRowResolver = new RowResolver();
reduceSinkOutputRowResolver.setIsExprResolver(true);
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
// Pre-compute group-by keys and store in reduceKeys
List<String> outputKeyColumnNames = new ArrayList<String>();
List<String> outputValueColumnNames = new ArrayList<String>();
ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs, dest,
reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
colExprMap);
// add a key for reduce sink
if (groupingSetsPresent) {
// Process grouping set for the reduce sink operator
processGroupingSetReduceSinkOperator(
reduceSinkInputRowResolver,
reduceSinkOutputRowResolver,
reduceKeys,
outputKeyColumnNames,
colExprMap);
// The grouping-set id became an extra key, so partitioning may need to
// cover it as well.
if (changeNumPartitionFields) {
numPartitionFields++;
}
}
// Distinct aggregation parameters are appended to the reduce keys; the
// returned indices identify each distinct expression's key positions.
List<List<Integer>> distinctColIndices = getDistinctColIndicesForReduceSink(parseInfo, dest,
reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
colExprMap);
ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
HashMap<String, ASTNode> aggregationTrees = parseInfo
.getAggregationExprsForClause(dest);
if (!mapAggrDone) {
// No map-side aggregation: ship the raw aggregation parameters as values.
getReduceValuesForReduceSinkNoMapAgg(parseInfo, dest, reduceSinkInputRowResolver,
reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues, colExprMap);
} else {
// Put partial aggregation results in reduceValues
int inputField = reduceKeys.size();
for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
// Partial aggregates follow the keys positionally in the input schema.
TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(
inputField).getType();
ExprNodeColumnDesc exprDesc = new ExprNodeColumnDesc(type,
getColumnInternalName(inputField), "", false);
reduceValues.add(exprDesc);
inputField++;
String outputColName = getColumnInternalName(reduceValues.size() - 1);
outputValueColumnNames.add(outputColName);
String internalName = Utilities.ReduceField.VALUE.toString() + "."
+ outputColName;
reduceSinkOutputRowResolver.putExpression(entry.getValue(),
new ColumnInfo(internalName, type, null, false));
colExprMap.put(internalName, exprDesc);
}
}
ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
OperatorFactory.getAndMakeChild(
PlanUtils.getReduceSinkDesc(reduceKeys,
groupingSetsPresent ? grpByExprs.size() + 1 : grpByExprs.size(),
reduceValues, distinctColIndices,
outputKeyColumnNames, outputValueColumnNames, true, -1, numPartitionFields,
numReducers),
new RowSchema(reduceSinkOutputRowResolver.getColumnInfos()), inputOperatorInfo),
reduceSinkOutputRowResolver);
rsOp.setColumnExprMap(colExprMap);
return rsOp;
}
/**
 * Builds the reduce-sink key expressions for the group-by clause items.
 * For every expression the KEY-qualified output column is registered in
 * {@code reduceSinkOutputRowResolver}, and {@code outputKeyColumnNames} /
 * {@code colExprMap} are appended to.
 *
 * @return the list of reduce key expressions, in clause order
 * @throws SemanticException if the same expression occurs twice in the
 *         group-by clause
 */
private ArrayList<ExprNodeDesc> getReduceKeysForReduceSink(List<ASTNode> grpByExprs, String dest,
    RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver,
    List<String> outputKeyColumnNames, Map<String, ExprNodeDesc> colExprMap)
    throws SemanticException {
  // NOTE: 'dest' is currently not referenced in the body.
  ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
  for (ASTNode grpbyExpr : grpByExprs) {
    ExprNodeDesc inputExpr = genExprNodeDesc(grpbyExpr, reduceSinkInputRowResolver);
    reduceKeys.add(inputExpr);
    // A group-by expression may only be registered once.
    if (reduceSinkOutputRowResolver.getExpression(grpbyExpr) != null) {
      throw new SemanticException(ErrorMsg.DUPLICATE_GROUPBY_KEY
          .getMsg(grpbyExpr));
    }
    String internalName = getColumnInternalName(reduceKeys.size() - 1);
    outputKeyColumnNames.add(internalName);
    String keyField = Utilities.ReduceField.KEY.toString() + "." + internalName;
    ColumnInfo colInfo = new ColumnInfo(keyField, inputExpr.getTypeInfo(), null, false);
    reduceSinkOutputRowResolver.putExpression(grpbyExpr, colInfo);
    colExprMap.put(colInfo.getInternalName(), inputExpr);
  }
  return reduceKeys;
}
/**
 * Appends the parameters of every distinct aggregation to the reduce keys
 * (deduplicating expressions that are already keys) and returns, per
 * distinct function, the indices of its parameters within the reduce keys.
 */
private List<List<Integer>> getDistinctColIndicesForReduceSink(QBParseInfo parseInfo,
String dest,
List<ExprNodeDesc> reduceKeys, RowResolver reduceSinkInputRowResolver,
RowResolver reduceSinkOutputRowResolver, List<String> outputKeyColumnNames,
Map<String, ExprNodeDesc> colExprMap)
throws SemanticException {
List<List<Integer>> distinctColIndices = new ArrayList<List<Integer>>();
// If there is a distinctFuncExp, add all parameters to the reduceKeys.
if (!parseInfo.getDistinctFuncExprsForClause(dest).isEmpty()) {
List<ASTNode> distFuncs = parseInfo.getDistinctFuncExprsForClause(dest);
// All distinct parameters share a single key column name; individual
// parameters are addressed as KEY.colName:<func index>.<param name>.
String colName = getColumnInternalName(reduceKeys.size());
outputKeyColumnNames.add(colName);
for (int i = 0; i < distFuncs.size(); i++) {
ASTNode value = distFuncs.get(i);
int numExprs = 0;
List<Integer> distinctIndices = new ArrayList<Integer>();
// 0 is function name
for (int j = 1; j < value.getChildCount(); j++) {
ASTNode parameter = (ASTNode) value.getChild(j);
ExprNodeDesc expr = genExprNodeDesc(parameter, reduceSinkInputRowResolver);
// see if expr is already present in reduceKeys.
// get index of expr in reduceKeys
int ri;
for (ri = 0; ri < reduceKeys.size(); ri++) {
if (reduceKeys.get(ri).getExprString().equals(expr.getExprString())) {
break;
}
}
// add the expr to reduceKeys if it is not present
if (ri == reduceKeys.size()) {
// 'numExprs' (parameter position within this function) names the
// sub-column, not the key index.
String name = getColumnInternalName(numExprs);
String field = Utilities.ReduceField.KEY.toString() + "." + colName
+ ":" + i
+ "." + name;
ColumnInfo colInfo = new ColumnInfo(field, expr.getTypeInfo(), null, false);
reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
colExprMap.put(field, expr);
reduceKeys.add(expr);
}
// add the index of expr in reduceKeys to distinctIndices
distinctIndices.add(ri);
numExprs++;
}
distinctColIndices.add(distinctIndices);
}
}
return distinctColIndices;
}
/**
 * With no map-side aggregation, the raw parameters of every aggregation for
 * this destination must reach the reducer through the reduce-sink VALUE
 * section. Each parameter not already present in the output row resolver is
 * appended to {@code reduceValues}, {@code outputValueColumnNames} and
 * {@code colExprMap}, and registered under its VALUE-qualified name.
 */
private void getReduceValuesForReduceSinkNoMapAgg(QBParseInfo parseInfo, String dest,
    RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver,
    List<String> outputValueColumnNames, ArrayList<ExprNodeDesc> reduceValues,
    Map<String, ExprNodeDesc> colExprMap) throws SemanticException {
  for (Map.Entry<String, ASTNode> entry
      : parseInfo.getAggregationExprsForClause(dest).entrySet()) {
    ASTNode aggTree = entry.getValue();
    // child 0 is the function name; parameters start at child 1
    for (int i = 1; i < aggTree.getChildCount(); i++) {
      ASTNode parameter = (ASTNode) aggTree.getChild(i);
      if (reduceSinkOutputRowResolver.getExpression(parameter) != null) {
        continue; // already forwarded (e.g. shared with another aggregation)
      }
      ExprNodeDesc exprDesc = genExprNodeDesc(parameter, reduceSinkInputRowResolver);
      reduceValues.add(exprDesc);
      String internalName = getColumnInternalName(reduceValues.size() - 1);
      outputValueColumnNames.add(internalName);
      String valueField = Utilities.ReduceField.VALUE.toString() + "." + internalName;
      reduceSinkOutputRowResolver.putExpression(parameter,
          new ColumnInfo(valueField, exprDesc.getTypeInfo(), null, false));
      colExprMap.put(valueField, exprDesc);
    }
  }
}
/**
 * Generates a single ReduceSinkOperator shared by several destinations
 * (multi-insert with a common group-by). Keys and distinct indices are
 * derived from the first destination; values accumulate the non-distinct
 * aggregation parameters and where-clause columns of every destination.
 */
@SuppressWarnings("nls")
private ReduceSinkOperator genCommonGroupByPlanReduceSinkOperator(QB qb, List<String> dests,
Operator inputOperatorInfo) throws SemanticException {
RowResolver reduceSinkInputRowResolver = opParseCtx.get(inputOperatorInfo)
.getRowResolver();
QBParseInfo parseInfo = qb.getParseInfo();
RowResolver reduceSinkOutputRowResolver = new RowResolver();
reduceSinkOutputRowResolver.setIsExprResolver(true);
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
// The group by keys and distinct keys should be the same for all dests, so using the first
// one to produce these will be the same as using any other.
String dest = dests.get(0);
// Pre-compute group-by keys and store in reduceKeys
List<String> outputKeyColumnNames = new ArrayList<String>();
List<String> outputValueColumnNames = new ArrayList<String>();
List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs, dest,
reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
colExprMap);
List<List<Integer>> distinctColIndices = getDistinctColIndicesForReduceSink(parseInfo, dest,
reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
colExprMap);
ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
// The dests can have different non-distinct aggregations, so we have to iterate over all of
// them
for (String destination : dests) {
getReduceValuesForReduceSinkNoMapAgg(parseInfo, destination, reduceSinkInputRowResolver,
reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues, colExprMap);
// Need to pass all of the columns used in the where clauses as reduce values
ASTNode whereClause = parseInfo.getWhrForClause(destination);
if (whereClause != null) {
assert whereClause.getChildCount() == 1;
ASTNode predicates = (ASTNode) whereClause.getChild(0);
Map<ASTNode, ExprNodeDesc> nodeOutputs =
genAllExprNodeDesc(predicates, reduceSinkInputRowResolver);
// Columns already shipped as reduce keys need not be duplicated as values.
removeMappingForKeys(predicates, nodeOutputs, reduceKeys);
// extract columns missing in current RS key/value
for (Map.Entry<ASTNode, ExprNodeDesc> entry : nodeOutputs.entrySet()) {
ASTNode parameter = entry.getKey();
ExprNodeDesc expression = entry.getValue();
// Only bare column references are forwarded; composite expressions are
// recomputed downstream from their columns.
if (!(expression instanceof ExprNodeColumnDesc)) {
continue;
}
if (ExprNodeDescUtils.indexOf(expression, reduceValues) >= 0) {
continue;
}
String internalName = getColumnInternalName(reduceValues.size());
String field = Utilities.ReduceField.VALUE.toString() + "." + internalName;
reduceValues.add(expression);
outputValueColumnNames.add(internalName);
reduceSinkOutputRowResolver.putExpression(parameter,
new ColumnInfo(field, expression.getTypeInfo(), null, false));
colExprMap.put(field, expression);
}
}
}
ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys,
grpByExprs.size(), reduceValues, distinctColIndices,
outputKeyColumnNames, outputValueColumnNames, true, -1, grpByExprs.size(),
-1), new RowSchema(reduceSinkOutputRowResolver
.getColumnInfos()), inputOperatorInfo), reduceSinkOutputRowResolver);
rsOp.setColumnExprMap(colExprMap);
return rsOp;
}
// Remove expression node descriptor and children of it for a given predicate
// from mapping if it's already on RS keys.
// Remaining column expressions would be a candidate for an RS value
/**
 * Removes from {@code mapping} the descriptor for {@code predicate} and all
 * of its children when the predicate is already covered by a reduce key;
 * otherwise recurses into the children. What remains in the mapping is a
 * candidate set of reduce-sink values.
 */
private void removeMappingForKeys(ASTNode predicate, Map<ASTNode, ExprNodeDesc> mapping,
    List<ExprNodeDesc> keys) {
  ExprNodeDesc mapped = mapping.get(predicate);
  if (mapped == null || ExprNodeDescUtils.indexOf(mapped, keys) < 0) {
    // Not served by a key at this level; inspect each child individually.
    for (int i = 0; i < predicate.getChildCount(); i++) {
      removeMappingForKeys((ASTNode) predicate.getChild(i), mapping, keys);
    }
  } else {
    // The whole subtree is covered by an existing reduce key; drop it.
    removeRecursively(predicate, mapping);
  }
}
// Remove expression node desc and all children of it from mapping
/**
 * Removes {@code current} and its entire subtree from {@code mapping}.
 */
private void removeRecursively(ASTNode current, Map<ASTNode, ExprNodeDesc> mapping) {
  mapping.remove(current);
  int childCount = current.getChildCount();
  for (int idx = 0; idx < childCount; idx++) {
    removeRecursively((ASTNode) current.getChild(idx), mapping);
  }
}
/**
* Generate the second ReduceSinkOperator for the Group By Plan
* (parseInfo.getXXX(dest)). The new ReduceSinkOperator will be a child of
* groupByOperatorInfo.
*
* The second ReduceSinkOperator will put the group by keys in the map-reduce
* sort key, and put the partial aggregation results in the map-reduce value.
*
* @param numPartitionFields
* the number of fields in the map-reduce partition key. This should
* always be the same as the number of Group By keys. We should be
* able to remove this parameter since in this phase there is no
* distinct any more.
* @return the new ReduceSinkOperator.
* @throws SemanticException
*/
@SuppressWarnings("nls")
private Operator genGroupByPlanReduceSinkOperator2MR(QBParseInfo parseInfo,
String dest,
Operator groupByOperatorInfo,
int numPartitionFields,
int numReducers,
boolean groupingSetsPresent) throws SemanticException {
RowResolver reduceSinkInputRowResolver2 = opParseCtx.get(
groupByOperatorInfo).getRowResolver();
RowResolver reduceSinkOutputRowResolver2 = new RowResolver();
reduceSinkOutputRowResolver2.setIsExprResolver(true);
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
ArrayList<String> outputColumnNames = new ArrayList<String>();
// Get group-by keys and store in reduceKeys
List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
for (int i = 0; i < grpByExprs.size(); ++i) {
ASTNode grpbyExpr = grpByExprs.get(i);
// The first-stage GroupBy named its key outputs positionally, so the i-th
// key is addressed by its positional internal name.
String field = getColumnInternalName(i);
outputColumnNames.add(field);
TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(
grpbyExpr).getType();
ExprNodeColumnDesc inputExpr = new ExprNodeColumnDesc(typeInfo, field,
"", false);
reduceKeys.add(inputExpr);
ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.KEY.toString()
+ "." + field, typeInfo, "", false);
reduceSinkOutputRowResolver2.putExpression(grpbyExpr, colInfo);
colExprMap.put(colInfo.getInternalName(), inputExpr);
}
// add a key for reduce sink
if (groupingSetsPresent) {
// Note that partitioning fields dont need to change, since it is either
// partitioned randomly, or by all grouping keys + distinct keys
processGroupingSetReduceSinkOperator(
reduceSinkInputRowResolver2,
reduceSinkOutputRowResolver2,
reduceKeys,
outputColumnNames,
colExprMap);
}
// Get partial aggregation results and store in reduceValues
ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
int inputField = reduceKeys.size();
HashMap<String, ASTNode> aggregationTrees = parseInfo
.getAggregationExprsForClause(dest);
for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
// Partial aggregates follow the keys positionally in the input schema.
String field = getColumnInternalName(inputField);
ASTNode t = entry.getValue();
TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(t)
.getType();
ExprNodeColumnDesc exprDesc = new ExprNodeColumnDesc(typeInfo, field, "", false);
reduceValues.add(exprDesc);
inputField++;
String col = getColumnInternalName(reduceValues.size() - 1);
outputColumnNames.add(col);
reduceSinkOutputRowResolver2.putExpression(t, new ColumnInfo(
Utilities.ReduceField.VALUE.toString() + "." + col, typeInfo, "",
false));
colExprMap.put(col, exprDesc);
}
ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys,
reduceValues, outputColumnNames, true, -1, numPartitionFields,
numReducers), new RowSchema(reduceSinkOutputRowResolver2
.getColumnInfos()), groupByOperatorInfo),
reduceSinkOutputRowResolver2);
rsOp.setColumnExprMap(colExprMap);
return rsOp;
}
/**
* Generate the second GroupByOperator for the Group By Plan
* (parseInfo.getXXX(dest)). The new GroupByOperator will do the second
* aggregation based on the partial aggregation results.
*
* @param mode
* the mode of aggregation (FINAL)
* @param genericUDAFEvaluators
* The mapping from Aggregation StringTree to the
* genericUDAFEvaluator.
* @return the new GroupByOperator
* @throws SemanticException
*/
@SuppressWarnings("nls")
private Operator genGroupByPlanGroupByOperator2MR(QBParseInfo parseInfo,
    String dest,
    Operator reduceSinkOperatorInfo2,
    GroupByDesc.Mode mode,
    Map<String, GenericUDAFEvaluator> genericUDAFEvaluators,
    boolean groupingSetsPresent) throws SemanticException {
  // Resolver of the feeding reduce sink; keys and partial aggregates are
  // looked up against it.
  RowResolver groupByInputRowResolver2 = opParseCtx.get(
      reduceSinkOperatorInfo2).getRowResolver();
  RowResolver groupByOutputRowResolver2 = new RowResolver();
  groupByOutputRowResolver2.setIsExprResolver(true);
  ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  // Re-emit each grouping key under a fresh internal column name.
  for (int i = 0; i < grpByExprs.size(); ++i) {
    ASTNode grpbyExpr = grpByExprs.get(i);
    ColumnInfo exprInfo = groupByInputRowResolver2.getExpression(grpbyExpr);
    if (exprInfo == null) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
    }
    String expression = exprInfo.getInternalName();
    groupByKeys.add(new ExprNodeColumnDesc(exprInfo.getType(), expression,
        exprInfo.getTabAlias(), exprInfo.getIsVirtualCol()));
    String field = getColumnInternalName(i);
    outputColumnNames.add(field);
    ColumnInfo oColInfo = new ColumnInfo(field, exprInfo.getType(), "", false);
    groupByOutputRowResolver2.putExpression(grpbyExpr,
        oColInfo);
    addAlternateGByKeyMappings(grpbyExpr, oColInfo, reduceSinkOperatorInfo2, groupByOutputRowResolver2);
    colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
  }
  // For grouping sets, add a dummy grouping key
  if (groupingSetsPresent) {
    addGroupingSetKey(
        groupByKeys,
        groupByInputRowResolver2,
        groupByOutputRowResolver2,
        outputColumnNames,
        colExprMap);
  }
  HashMap<String, ASTNode> aggregationTrees = parseInfo
      .getAggregationExprsForClause(dest);
  boolean containsDistinctAggr = false;
  for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
    ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
    ASTNode value = entry.getValue();
    // The sole parameter of this second-stage aggregation is the partial
    // aggregate produced by the first stage.
    ColumnInfo paraExprInfo = groupByInputRowResolver2.getExpression(value);
    if (paraExprInfo == null) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value));
    }
    String paraExpression = paraExprInfo.getInternalName();
    assert (paraExpression != null);
    aggParameters.add(new ExprNodeColumnDesc(paraExprInfo.getType(),
        paraExpression, paraExprInfo.getTabAlias(), paraExprInfo
        .getIsVirtualCol()));
    String aggName = unescapeIdentifier(value.getChild(0).getText());
    boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
    containsDistinctAggr = containsDistinctAggr || isDistinct;
    // (An unused "isStar" flag computed here previously has been removed.)
    Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
    // Reuse the evaluator created for the first aggregation stage so that
    // partial and final evaluation stay consistent.
    GenericUDAFEvaluator genericUDAFEvaluator = genericUDAFEvaluators
        .get(entry.getKey());
    assert (genericUDAFEvaluator != null);
    GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
        aggParameters);
    aggregations
        .add(new AggregationDesc(
            aggName.toLowerCase(),
            udaf.genericUDAFEvaluator,
            udaf.convertedParameters,
            (mode != GroupByDesc.Mode.FINAL && value.getToken().getType() ==
            HiveParser.TOK_FUNCTIONDI),
            amode));
    String field = getColumnInternalName(groupByKeys.size()
        + aggregations.size() - 1);
    outputColumnNames.add(field);
    groupByOutputRowResolver2.putExpression(value, new ColumnInfo(
        field, udaf.returnType, "", false));
  }
  // Hash-aggregation memory limits forwarded to the operator descriptor.
  float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
  float memoryThreshold = HiveConf
      .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
  Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
          false, groupByMemoryUsage, memoryThreshold, null, false, 0, containsDistinctAggr),
      new RowSchema(groupByOutputRowResolver2.getColumnInfos()),
      reduceSinkOperatorInfo2), groupByOutputRowResolver2);
  op.setColumnExprMap(colExprMap);
  return op;
}
/**
* Generate a Group-By plan using a single map-reduce job (3 operators will be
* inserted):
*
* ReduceSink ( keys = (K1_EXP, K2_EXP, DISTINCT_EXP), values = (A1_EXP,
* A2_EXP) ) SortGroupBy (keys = (KEY.0,KEY.1), aggregations =
* (count_distinct(KEY.2), sum(VALUE.0), count(VALUE.1))) Select (final
* selects).
*
* @param dest
* @param qb
* @param input
* @return
* @throws SemanticException
*
* Generate a Group-By plan using 1 map-reduce job. Spray by the
* group by key, and sort by the distinct key (if any), and compute
* aggregates. The aggregation evaluation functions are as
* follows: Partitioning Key: grouping key
*
* Sorting Key: grouping key if no DISTINCT grouping + distinct key
* if DISTINCT
*
* Reducer: iterate/merge (mode = COMPLETE)
**/
@SuppressWarnings({"nls"})
private Operator genGroupByPlan1MR(String dest, QB qb, Operator input)
    throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  ObjectPair<List<ASTNode>, List<Integer>> keysAndSets =
      getGroupByGroupingSetsForClause(qbp, dest);
  List<ASTNode> groupByKeys = keysAndSets.getFirst();
  // Grouping sets cannot be handled by this plan shape.
  if (!keysAndSets.getSecond().isEmpty()) {
    throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOMAPAGGR.getMsg());
  }
  // With no grouping keys all rows collapse into one group, so a single
  // reducer suffices; otherwise let the framework decide (-1).
  int reducerCount = groupByKeys.isEmpty() ? 1 : -1;
  // 1. Shuffle the rows on the grouping key.
  ReduceSinkOperator rsOp =
      genGroupByPlanReduceSinkOperator(qb,
          dest,
          input,
          groupByKeys,
          groupByKeys.size(),
          false,
          reducerCount,
          false,
          false);
  // 2. Aggregate fully on the reduce side (mode = COMPLETE).
  return genGroupByPlanGroupByOperator(qbp, dest, rsOp, rsOp,
      GroupByDesc.Mode.COMPLETE, null);
}
@SuppressWarnings({"nls"})
private Operator genGroupByPlan1ReduceMultiGBY(List<String> dests, QB qb, Operator input,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBParseInfo parseInfo = qb.getParseInfo();
  // Running disjunction of all destination WHERE clauses; reset to null as
  // soon as one destination lacks a predicate (no common filter possible).
  ExprNodeDesc previous = null;
  Operator selectInput = input;
  // In order to facilitate partition pruning, OR the where clauses together and put them at the
  // top of the operator tree, this could also reduce the amount of data going to the reducer
  List<ExprNodeDesc.ExprNodeDescEqualityWrapper> whereExpressions =
      new ArrayList<ExprNodeDesc.ExprNodeDescEqualityWrapper>();
  for (String dest : dests) {
    ASTNode whereExpr = parseInfo.getWhrForClause(dest);
    if (whereExpr != null) {
      OpParseContext inputCtx = opParseCtx.get(input);
      RowResolver inputRR = inputCtx.getRowResolver();
      ExprNodeDesc current = genExprNodeDesc((ASTNode) whereExpr.getChild(0), inputRR);
      // Check the list of where expressions already added so they aren't duplicated
      ExprNodeDesc.ExprNodeDescEqualityWrapper currentWrapped =
          new ExprNodeDesc.ExprNodeDescEqualityWrapper(current);
      if (!whereExpressions.contains(currentWrapped)) {
        whereExpressions.add(currentWrapped);
      } else {
        continue;
      }
      if (previous == null) {
        // If this is the first expression
        previous = current;
        continue;
      }
      // Fold the new predicate into the accumulated filter with an OR.
      GenericUDFOPOr or = new GenericUDFOPOr();
      List<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(2);
      expressions.add(previous);
      expressions.add(current);
      ExprNodeDesc orExpr =
          new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, or, expressions);
      previous = orExpr;
    } else {
      // If an expression does not have a where clause, there can be no common filter
      previous = null;
      break;
    }
  }
  if (previous != null) {
    // Insert the common OR-ed filter below the select-all operator.
    OpParseContext inputCtx = opParseCtx.get(input);
    RowResolver inputRR = inputCtx.getRowResolver();
    FilterDesc orFilterDesc = new FilterDesc(previous, false);
    selectInput = putOpInsertMap(OperatorFactory.getAndMakeChild(
        orFilterDesc, new RowSchema(
            inputRR.getColumnInfos()), input), inputRR);
  }
  // insert a select operator here used by the ColumnPruner to reduce
  // the data to shuffle
  Operator select = insertSelectAllPlanForGroupBy(selectInput);
  // Generate ReduceSinkOperator
  ReduceSinkOperator reduceSinkOperatorInfo =
      genCommonGroupByPlanReduceSinkOperator(qb, dests, select);
  // It is assumed throughout the code that a reducer has a single child, add a
  // ForwardOperator so that we can add multiple filter/group by operators as children
  RowResolver reduceSinkOperatorInfoRR = opParseCtx.get(reduceSinkOperatorInfo).getRowResolver();
  Operator forwardOp = putOpInsertMap(OperatorFactory.getAndMakeChild(new ForwardDesc(),
      new RowSchema(reduceSinkOperatorInfoRR.getColumnInfos()), reduceSinkOperatorInfo),
      reduceSinkOperatorInfoRR);
  Operator curr = forwardOp;
  // Build one filter + group-by branch per destination under the forward op.
  for (String dest : dests) {
    curr = forwardOp;
    if (parseInfo.getWhrForClause(dest) != null) {
      // Re-apply the destination-specific predicate on the reduce side.
      ASTNode whereExpr = qb.getParseInfo().getWhrForClause(dest);
      curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, forwardOp, aliasToOpInfo, false);
    }
    // Generate GroupbyOperator
    Operator groupByOperatorInfo = genGroupByPlanGroupByOperator(parseInfo,
        dest, curr, reduceSinkOperatorInfo, GroupByDesc.Mode.COMPLETE, null);
    curr = genPostGroupByBodyPlan(groupByOperatorInfo, dest, qb, aliasToOpInfo);
  }
  // Returns the branch built for the last destination processed.
  return curr;
}
static ArrayList<GenericUDAFEvaluator> getUDAFEvaluators(
    ArrayList<AggregationDesc> aggs) {
  // Collect the evaluator of every aggregation descriptor, in order.
  ArrayList<GenericUDAFEvaluator> evaluators =
      new ArrayList<GenericUDAFEvaluator>();
  for (AggregationDesc agg : aggs) {
    evaluators.add(agg.getGenericUDAFEvaluator());
  }
  return evaluators;
}
/**
* Generate a Multi Group-By plan using a 2 map-reduce jobs.
*
* @param dest
* @param qb
* @param input
* @return
* @throws SemanticException
*
* Generate a Group-By plan using a 2 map-reduce jobs. Spray by the
* distinct key in hope of getting a uniform distribution, and
* compute partial aggregates by the grouping key. Evaluate partial
* aggregates first, and spray by the grouping key to compute actual
* aggregates in the second phase. The aggregation evaluation
* functions are as follows: Partitioning Key: distinct key
*
* Sorting Key: distinct key
*
* Reducer: iterate/terminatePartial (mode = PARTIAL1)
*
* STAGE 2
*
* Partitioning Key: grouping key
*
* Sorting Key: grouping key
*
* Reducer: merge/terminate (mode = FINAL)
*/
@SuppressWarnings("nls")
private Operator genGroupByPlan2MRMultiGroupBy(String dest, QB qb,
    Operator input) throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  // Evaluators created for the partial stage are shared with the final stage.
  Map<String, GenericUDAFEvaluator> evaluators =
      new LinkedHashMap<String, GenericUDAFEvaluator>();
  // Stage 1: hash-based partial aggregation over the sprayed rows.
  Operator partialGby = genGroupByPlanGroupByOperator1(qbp,
      dest, input, GroupByDesc.Mode.HASH, evaluators, true,
      null, false, false);
  // Stage 2: re-spray the partial results by the grouping key...
  List<ASTNode> groupByKeys = getGroupByForClause(qbp, dest);
  Operator rsOp2 = genGroupByPlanReduceSinkOperator2MR(
      qbp, dest, partialGby, groupByKeys.size(), -1, false);
  // ...and merge them into the final aggregates (mode = FINAL).
  return genGroupByPlanGroupByOperator2MR(qbp,
      dest, rsOp2, GroupByDesc.Mode.FINAL,
      evaluators, false);
}
/**
* Generate a Group-By plan using a 2 map-reduce jobs (5 operators will be
* inserted):
*
* ReduceSink ( keys = (K1_EXP, K2_EXP, DISTINCT_EXP), values = (A1_EXP,
* A2_EXP) ) NOTE: If DISTINCT_EXP is null, partition by rand() SortGroupBy
* (keys = (KEY.0,KEY.1), aggregations = (count_distinct(KEY.2), sum(VALUE.0),
* count(VALUE.1))) ReduceSink ( keys = (0,1), values=(2,3,4)) SortGroupBy
* (keys = (KEY.0,KEY.1), aggregations = (sum(VALUE.0), sum(VALUE.1),
* sum(VALUE.2))) Select (final selects).
*
* @param dest
* @param qb
* @param input
* @return
* @throws SemanticException
*
* Generate a Group-By plan using a 2 map-reduce jobs. Spray by the
* grouping key and distinct key (or a random number, if no distinct
* is present) in hope of getting a uniform distribution, and
* compute partial aggregates grouped by the reduction key (grouping
* key + distinct key). Evaluate partial aggregates first, and spray
* by the grouping key to compute actual aggregates in the second
* phase. The aggregation evaluation functions are as follows:
* Partitioning Key: random() if no DISTINCT grouping + distinct key
* if DISTINCT
*
* Sorting Key: grouping key if no DISTINCT grouping + distinct key
* if DISTINCT
*
* Reducer: iterate/terminatePartial (mode = PARTIAL1)
*
* STAGE 2
*
* Partitioning Key: grouping key
*
* Sorting Key: grouping key if no DISTINCT grouping + distinct key
* if DISTINCT
*
* Reducer: merge/terminate (mode = FINAL)
*/
@SuppressWarnings("nls")
private Operator genGroupByPlan2MR(String dest, QB qb, Operator input)
    throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  ObjectPair<List<ASTNode>, List<Integer>> keysAndSets =
      getGroupByGroupingSetsForClause(qbp, dest);
  List<ASTNode> groupByKeys = keysAndSets.getFirst();
  // Grouping sets are not supported by this plan shape (see HIVE-3508).
  if (!keysAndSets.getSecond().isEmpty()) {
    throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOMAPAGGR.getMsg());
  }
  // 1. Spray the rows. Without a DISTINCT the rows are distributed randomly
  // (numPartitionFields = -1) purely for load balancing; with a DISTINCT,
  // all key columns take part in partitioning (Integer.MAX_VALUE).
  boolean hasDistinct = !qbp.getDistinctFuncExprsForClause(dest).isEmpty();
  ReduceSinkOperator rsOp1 =
      genGroupByPlanReduceSinkOperator(qb,
          dest,
          input,
          groupByKeys,
          hasDistinct ? Integer.MAX_VALUE : -1,
          false,
          -1,
          false,
          false);
  // 2. Partial aggregation on the first reducer (mode = PARTIAL1); the
  // evaluators created here are reused by the final group-by.
  Map<String, GenericUDAFEvaluator> evaluators =
      new LinkedHashMap<String, GenericUDAFEvaluator>();
  GroupByOperator partialGby = (GroupByOperator) genGroupByPlanGroupByOperator(
      qbp, dest, rsOp1, rsOp1, GroupByDesc.Mode.PARTIAL1,
      evaluators);
  // With no grouping keys everything reduces to one row: one reducer is enough.
  int reducerCount = groupByKeys.isEmpty() ? 1 : -1;
  // 3. Re-spray the partial results by the grouping key alone.
  Operator rsOp2 = genGroupByPlanReduceSinkOperator2MR(
      qbp, dest, partialGby, groupByKeys.size(), reducerCount, false);
  // 4. Merge the partial aggregates into the final result (mode = FINAL).
  return genGroupByPlanGroupByOperator2MR(qbp,
      dest, rsOp2, GroupByDesc.Mode.FINAL,
      evaluators, false);
}
private boolean optimizeMapAggrGroupBy(String dest, QB qb) {
  // The single-job map-aggregation shortcut applies only when the query has
  // neither grouping keys nor distinct aggregates.
  List<ASTNode> groupByKeys = getGroupByForClause(qb.getParseInfo(), dest);
  boolean hasGroupByKeys = (groupByKeys != null) && !groupByKeys.isEmpty();
  if (hasGroupByKeys) {
    return false;
  }
  return qb.getParseInfo().getDistinctFuncExprsForClause(dest).isEmpty();
}
static private void extractColumns(Set<String> colNamesExprs,
    ExprNodeDesc exprNode) throws SemanticException {
  // Recursively gather every column name referenced by the expression tree
  // into colNamesExprs.
  if (exprNode instanceof ExprNodeColumnDesc) {
    // Leaf: a direct column reference.
    colNamesExprs.add(((ExprNodeColumnDesc) exprNode).getColumn());
  } else if (exprNode instanceof ExprNodeGenericFuncDesc) {
    // Function call: descend into each argument.
    for (ExprNodeDesc child : ((ExprNodeGenericFuncDesc) exprNode).getChildren()) {
      extractColumns(colNamesExprs, child);
    }
  }
}
static private boolean hasCommonElement(Set<String> set1, Set<String> set2) {
  // True iff the two sets intersect: scan the first set for any member of
  // the second.
  boolean found = false;
  for (String candidate : set1) {
    found = set2.contains(candidate);
    if (found) {
      break;
    }
  }
  return found;
}
/**
 * Verify that, when grouping sets are used, none of the columns referenced
 * by the grouping keys is also referenced by a DISTINCT expression or by an
 * aggregate-function parameter.
 *
 * @throws SemanticException if a grouping-key column is reused in a
 *           distinct or aggregate expression
 */
private void checkExpressionsForGroupingSet(List<ASTNode> grpByExprs,
    List<ASTNode> distinctGrpByExprs,
    Map<String, ASTNode> aggregationTrees,
    RowResolver inputRowResolver) throws SemanticException {
  // Columns referenced by the plain group-by keys.
  Set<String> colNamesGroupByExprs = new HashSet<String>();
  // Columns referenced inside DISTINCT expressions.
  Set<String> colNamesGroupByDistinctExprs = new HashSet<String>();
  // Columns referenced by aggregate-function parameters.
  Set<String> colNamesAggregateParameters = new HashSet<String>();
  // The columns in the group by expressions should not intersect with the columns in the
  // distinct expressions
  for (ASTNode grpByExpr : grpByExprs) {
    extractColumns(colNamesGroupByExprs, genExprNodeDesc(grpByExpr, inputRowResolver));
  }
  // If there is a distinctFuncExp, add all parameters to the reduceKeys.
  if (!distinctGrpByExprs.isEmpty()) {
    for (ASTNode value : distinctGrpByExprs) {
      // 0 is function name
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode parameter = (ASTNode) value.getChild(i);
        ExprNodeDesc distExprNode = genExprNodeDesc(parameter, inputRowResolver);
        // extract all the columns
        extractColumns(colNamesGroupByDistinctExprs, distExprNode);
      }
      // Fail as soon as a grouping-key column shows up in a DISTINCT expression.
      if (hasCommonElement(colNamesGroupByExprs, colNamesGroupByDistinctExprs)) {
        throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_EXPRESSION_INVALID.getMsg());
      }
    }
  }
  for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
    ASTNode value = entry.getValue();
    // 0 is the function name
    for (int i = 1; i < value.getChildCount(); i++) {
      ASTNode paraExpr = (ASTNode) value.getChild(i);
      ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRowResolver);
      // extract all the columns
      extractColumns(colNamesAggregateParameters, paraExprNode);
    }
    // Grouping-key columns may not appear as aggregate parameters either.
    if (hasCommonElement(colNamesGroupByExprs, colNamesAggregateParameters)) {
      throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_EXPRESSION_INVALID.getMsg());
    }
  }
}
/**
* Generate a Group-By plan using 1 map-reduce job. First perform a map-side
* partial aggregation (to reduce the amount of data), at this point of time,
* we may turn off map-side partial aggregation based on its performance. Then
* spray by the group by key, and sort by the distinct key (if any), and
* compute aggregates based on actual aggregates
*
* The aggregation evaluation functions are as follows:
*
* No grouping sets:
* Group By Operator:
* grouping keys: group by expressions if no DISTINCT
* grouping keys: group by expressions + distinct keys if DISTINCT
* Mapper: iterate/terminatePartial (mode = HASH)
* Partitioning Key: grouping key
* Sorting Key: grouping key if no DISTINCT
* grouping + distinct key if DISTINCT
* Reducer: iterate/terminate if DISTINCT
* merge/terminate if NO DISTINCT (mode MERGEPARTIAL)
*
* Grouping Sets:
* Group By Operator:
* grouping keys: group by expressions + grouping id. if no DISTINCT
* grouping keys: group by expressions + grouping id. + distinct keys if DISTINCT
* Mapper: iterate/terminatePartial (mode = HASH)
* Partitioning Key: grouping key + grouping id.
* Sorting Key: grouping key + grouping id. if no DISTINCT
* grouping + grouping id. + distinct key if DISTINCT
* Reducer: iterate/terminate if DISTINCT
* merge/terminate if NO DISTINCT (mode MERGEPARTIAL)
*
* Grouping Sets with an additional MR job introduced (distincts are not allowed):
* Group By Operator:
* grouping keys: group by expressions
* Mapper: iterate/terminatePartial (mode = HASH)
* Partitioning Key: grouping key
* Sorting Key: grouping key
* Reducer: merge/terminate (mode MERGEPARTIAL)
* Group by Operator:
* grouping keys: group by expressions + add a new grouping id. key
*
* STAGE 2
* Partitioning Key: grouping key + grouping id.
* Sorting Key: grouping key + grouping id.
* Reducer: merge/terminate (mode = FINAL)
* Group by Operator:
* grouping keys: group by expressions + grouping id.
*/
@SuppressWarnings("nls")
private Operator genGroupByPlanMapAggrNoSkew(String dest, QB qb,
    Operator inputOperatorInfo) throws SemanticException {
  QBParseInfo parseInfo = qb.getParseInfo();
  ObjectPair<List<ASTNode>, List<Integer>> grpByExprsGroupingSets =
      getGroupByGroupingSetsForClause(parseInfo, dest);
  List<ASTNode> grpByExprs = grpByExprsGroupingSets.getFirst();
  List<Integer> groupingSets = grpByExprsGroupingSets.getSecond();
  boolean groupingSetsPresent = !groupingSets.isEmpty();
  // Grouping-set cardinality above which the set expansion is deferred to an
  // additional MR job instead of being done in the current one.
  int newMRJobGroupingSetsThreshold =
      conf.getIntVar(HiveConf.ConfVars.HIVE_NEW_JOB_GROUPING_SET_CARDINALITY);
  if (groupingSetsPresent) {
    // Grouping-key columns may not be reused inside distinct expressions or
    // aggregate parameters when grouping sets are involved.
    checkExpressionsForGroupingSet(grpByExprs,
        parseInfo.getDistinctFuncExprsForClause(dest),
        parseInfo.getAggregationExprsForClause(dest),
        opParseCtx.get(inputOperatorInfo).getRowResolver());
  }
  // ////// Generate GroupbyOperator for a map-side partial aggregation
  Map<String, GenericUDAFEvaluator> genericUDAFEvaluators =
      new LinkedHashMap<String, GenericUDAFEvaluator>();
  // Is the grouping sets data consumed in the current MR job, or
  // does it need an additional MR job
  boolean groupingSetsNeedAdditionalMRJob =
      groupingSetsPresent && groupingSets.size() > newMRJobGroupingSetsThreshold;
  GroupByOperator groupByOperatorInfo =
      (GroupByOperator) genGroupByPlanMapGroupByOperator(
          qb,
          dest,
          grpByExprs,
          inputOperatorInfo,
          GroupByDesc.Mode.HASH,
          genericUDAFEvaluators,
          groupingSets,
          groupingSetsPresent && !groupingSetsNeedAdditionalMRJob);
  groupOpToInputTables.put(groupByOperatorInfo, opParseCtx.get(
      inputOperatorInfo).getRowResolver().getTableNames());
  int numReducers = -1;
  // Optimize the scenario when there are no grouping keys - only 1 reducer is
  // needed
  if (grpByExprs.isEmpty()) {
    numReducers = 1;
  }
  // ////// Generate ReduceSink Operator
  boolean isDistinct = !qb.getParseInfo().getDistinctFuncExprsForClause(dest).isEmpty();
  // Distincts are not allowed with an additional mr job
  if (groupingSetsNeedAdditionalMRJob && isDistinct) {
    String errorMsg = "The number of rows per input row due to grouping sets is "
        + groupingSets.size();
    throw new SemanticException(
        ErrorMsg.HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_DISTINCTS.getMsg(errorMsg));
  }
  Operator reduceSinkOperatorInfo =
      genGroupByPlanReduceSinkOperator(qb,
          dest,
          groupByOperatorInfo,
          grpByExprs,
          grpByExprs.size(),
          true,
          numReducers,
          true,
          groupingSetsPresent && !groupingSetsNeedAdditionalMRJob);
  // Does it require a new MR job for grouping sets
  if (!groupingSetsPresent || !groupingSetsNeedAdditionalMRJob) {
    // This is a 1-stage map-reduce processing of the groupby. The map-side
    // aggregates was just used to
    // reduce output data. In case of distincts, partial results are not used,
    // and so iterate is again
    // invoked on the reducer. In case of non-distincts, partial results are
    // used, and merge is invoked
    // on the reducer.
    return genGroupByPlanGroupByOperator1(parseInfo, dest,
        reduceSinkOperatorInfo, GroupByDesc.Mode.MERGEPARTIAL,
        genericUDAFEvaluators, false,
        groupingSets, groupingSetsPresent, groupingSetsNeedAdditionalMRJob);
  }
  else
  {
    // Add 'n' rows corresponding to the grouping sets. For each row, create 'n' rows,
    // one for each grouping set key. Since map-side aggregation has already been performed,
    // the number of rows would have been reduced. Moreover, the rows corresponding to the
    // grouping keys come together, so there is a higher chance of finding the rows in the hash
    // table.
    Operator groupByOperatorInfo2 =
        genGroupByPlanGroupByOperator1(parseInfo, dest,
            reduceSinkOperatorInfo, GroupByDesc.Mode.PARTIALS,
            genericUDAFEvaluators, false,
            groupingSets, groupingSetsPresent, groupingSetsNeedAdditionalMRJob);
    // ////// Generate ReduceSinkOperator2
    Operator reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(
        parseInfo, dest, groupByOperatorInfo2, grpByExprs.size() + 1, numReducers,
        groupingSetsPresent);
    // ////// Generate GroupbyOperator3
    return genGroupByPlanGroupByOperator2MR(parseInfo, dest,
        reduceSinkOperatorInfo2, GroupByDesc.Mode.FINAL,
        genericUDAFEvaluators, groupingSetsPresent);
  }
}
/**
* Generate a Group-By plan using a 2 map-reduce jobs. However, only 1
* group-by plan is generated if the query involves no grouping key and no
* distincts. In that case, the plan is same as generated by
* genGroupByPlanMapAggr1MR. Otherwise, the following plan is generated: First
* perform a map side partial aggregation (to reduce the amount of data). Then
* spray by the grouping key and distinct key (or a random number, if no
* distinct is present) in hope of getting a uniform distribution, and compute
* partial aggregates grouped by the reduction key (grouping key + distinct
* key). Evaluate partial aggregates first, and spray by the grouping key to
* compute actual aggregates in the second phase.
*
* The aggregation evaluation functions are as follows:
*
* No grouping sets:
* STAGE 1
* Group by Operator:
* grouping keys: group by expressions if no DISTINCT
* grouping keys: group by expressions + distinct keys if DISTINCT
* Mapper: iterate/terminatePartial (mode = HASH)
* Partitioning Key: random() if no DISTINCT
* grouping + distinct key if DISTINCT
* Sorting Key: grouping key if no DISTINCT
* grouping + distinct key if DISTINCT
* Reducer: iterate/terminatePartial if DISTINCT
* merge/terminatePartial if NO DISTINCT (mode = MERGEPARTIAL)
* Group by Operator:
* grouping keys: group by expressions
*
* STAGE 2
* Partitioning Key: grouping key
* Sorting Key: grouping key
* Reducer: merge/terminate (mode = FINAL)
*
* In the presence of grouping sets, the aggregation evaluation functions are as follows:
* STAGE 1
* Group by Operator:
* grouping keys: group by expressions + grouping id. if no DISTINCT
* grouping keys: group by expressions + grouping id. + distinct keys if DISTINCT
* Mapper: iterate/terminatePartial (mode = HASH)
* Partitioning Key: random() if no DISTINCT
* grouping + grouping id. + distinct key if DISTINCT
* Sorting Key: grouping key + grouping id. if no DISTINCT
* grouping + grouping id. + distinct key if DISTINCT
* Reducer: iterate/terminatePartial if DISTINCT
* merge/terminatePartial if NO DISTINCT (mode = MERGEPARTIAL)
* Group by Operator:
* grouping keys: group by expressions + grouping id.
*
* STAGE 2
* Partitioning Key: grouping key
* Sorting Key: grouping key + grouping id.
* Reducer: merge/terminate (mode = FINAL)
*/
@SuppressWarnings("nls")
private Operator genGroupByPlanMapAggr2MR(String dest, QB qb,
    Operator inputOperatorInfo) throws SemanticException {
  QBParseInfo parseInfo = qb.getParseInfo();
  ObjectPair<List<ASTNode>, List<Integer>> grpByExprsGroupingSets =
      getGroupByGroupingSetsForClause(parseInfo, dest);
  List<ASTNode> grpByExprs = grpByExprsGroupingSets.getFirst();
  List<Integer> groupingSets = grpByExprsGroupingSets.getSecond();
  boolean groupingSetsPresent = !groupingSets.isEmpty();
  if (groupingSetsPresent) {
    // Grouping-key columns may not be reused in distinct/aggregate expressions.
    checkExpressionsForGroupingSet(grpByExprs,
        parseInfo.getDistinctFuncExprsForClause(dest),
        parseInfo.getAggregationExprsForClause(dest),
        opParseCtx.get(inputOperatorInfo).getRowResolver());
    int newMRJobGroupingSetsThreshold =
        conf.getIntVar(HiveConf.ConfVars.HIVE_NEW_JOB_GROUPING_SET_CARDINALITY);
    // Turn off skew if an additional MR job is required anyway for grouping sets.
    if (groupingSets.size() > newMRJobGroupingSetsThreshold) {
      String errorMsg = "The number of rows per input row due to grouping sets is "
          + groupingSets.size();
      throw new SemanticException(
          ErrorMsg.HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_SKEW.getMsg(errorMsg));
    }
  }
  // ////// Generate GroupbyOperator for a map-side partial aggregation
  // Evaluators created here are reused by the later aggregation stages.
  Map<String, GenericUDAFEvaluator> genericUDAFEvaluators =
      new LinkedHashMap<String, GenericUDAFEvaluator>();
  GroupByOperator groupByOperatorInfo =
      (GroupByOperator) genGroupByPlanMapGroupByOperator(
          qb, dest, grpByExprs, inputOperatorInfo, GroupByDesc.Mode.HASH,
          genericUDAFEvaluators, groupingSets, groupingSetsPresent);
  groupOpToInputTables.put(groupByOperatorInfo, opParseCtx.get(
      inputOperatorInfo).getRowResolver().getTableNames());
  // Optimize the scenario when there are no grouping keys and no distinct - 2
  // map-reduce jobs are not needed
  // For eg: select count(1) from T where t.ds = ....
  if (!optimizeMapAggrGroupBy(dest, qb)) {
    List<ASTNode> distinctFuncExprs = parseInfo.getDistinctFuncExprsForClause(dest);
    // ////// Generate ReduceSink Operator
    // Spray randomly (-1) when no distinct is present; otherwise partition
    // on all key columns (Integer.MAX_VALUE).
    Operator reduceSinkOperatorInfo =
        genGroupByPlanReduceSinkOperator(qb,
            dest,
            groupByOperatorInfo,
            grpByExprs,
            distinctFuncExprs.isEmpty() ? -1 : Integer.MAX_VALUE,
            false,
            -1,
            true,
            groupingSetsPresent);
    // ////// Generate GroupbyOperator for a partial aggregation
    Operator groupByOperatorInfo2 = genGroupByPlanGroupByOperator1(parseInfo,
        dest, reduceSinkOperatorInfo, GroupByDesc.Mode.PARTIALS,
        genericUDAFEvaluators, false,
        groupingSets, groupingSetsPresent, false);
    int numReducers = -1;
    // A single reducer suffices when there are no grouping keys.
    if (grpByExprs.isEmpty()) {
      numReducers = 1;
    }
    // ////// Generate ReduceSinkOperator2
    Operator reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(
        parseInfo, dest, groupByOperatorInfo2, grpByExprs.size(), numReducers,
        groupingSetsPresent);
    // ////// Generate GroupbyOperator3
    return genGroupByPlanGroupByOperator2MR(parseInfo, dest,
        reduceSinkOperatorInfo2, GroupByDesc.Mode.FINAL,
        genericUDAFEvaluators, groupingSetsPresent);
  } else {
    // If there are no grouping keys, grouping sets cannot be present
    assert !groupingSetsPresent;
    // ////// Generate ReduceSink Operator
    // Everything collapses into one group: a single reducer finishes the job.
    Operator reduceSinkOperatorInfo =
        genGroupByPlanReduceSinkOperator(qb,
            dest,
            groupByOperatorInfo,
            grpByExprs,
            grpByExprs.size(),
            false,
            1,
            true,
            groupingSetsPresent);
    return genGroupByPlanGroupByOperator2MR(parseInfo, dest,
        reduceSinkOperatorInfo, GroupByDesc.Mode.FINAL, genericUDAFEvaluators, false);
  }
}
@SuppressWarnings("nls")
private Operator genConversionOps(String dest, QB qb, Operator input)
    throws SemanticException {
  // Currently a pass-through: the destination metadata is looked up for
  // TABLE and PARTITION destinations, but the result is discarded and the
  // input operator is returned unchanged on every path.
  // NOTE(review): the getDest*ForAlias calls look vestigial — confirm they
  // have no required side effects before removing them.
  Integer dest_type = qb.getMetaData().getDestTypeForAlias(dest);
  switch (dest_type.intValue()) {
  case QBMetaData.DEST_TABLE: {
    qb.getMetaData().getDestTableForAlias(dest);
    break;
  }
  case QBMetaData.DEST_PARTITION: {
    qb.getMetaData().getDestPartitionForAlias(dest).getTable();
    break;
  }
  default: {
    return input;
  }
  }
  return input;
}
private int getReducersBucketing(int totalFiles, int maxReducers) {
  // Find the smallest files-per-reducer count, starting from the ceiling of
  // totalFiles/maxReducers, that divides totalFiles evenly; the returned
  // reducer count therefore never exceeds maxReducers.
  for (int filesPerReducer = (int) Math.ceil((double) totalFiles / (double) maxReducers);
      ; filesPerReducer++) {
    if (totalFiles % filesPerReducer == 0) {
      return totalFiles / filesPerReducer;
    }
  }
}
private static class SortBucketRSCtx {
  // Columns on which rows are sprayed when writing bucketed/sorted output.
  ArrayList<ExprNodeDesc> partnCols = null;
  // True when a single reducer must spray its output across several files.
  boolean multiFileSpray = false;
  // Number of files written per reducer.
  int numFiles = 1;
  // Total number of output files across all reducers.
  int totalFiles = 1;

  public SortBucketRSCtx() {
  }

  /**
   * @return the partnCols
   */
  public ArrayList<ExprNodeDesc> getPartnCols() {
    return partnCols;
  }

  /**
   * @param partnCols the partnCols to set
   */
  public void setPartnCols(ArrayList<ExprNodeDesc> partnCols) {
    this.partnCols = partnCols;
  }

  /**
   * @return the multiFileSpray
   */
  public boolean isMultiFileSpray() {
    return multiFileSpray;
  }

  /**
   * @param multiFileSpray the multiFileSpray to set
   */
  public void setMultiFileSpray(boolean multiFileSpray) {
    this.multiFileSpray = multiFileSpray;
  }

  /**
   * @return the numFiles
   */
  public int getNumFiles() {
    return numFiles;
  }

  /**
   * @param numFiles the numFiles to set
   */
  public void setNumFiles(int numFiles) {
    this.numFiles = numFiles;
  }

  /**
   * @return the totalFiles
   */
  public int getTotalFiles() {
    return totalFiles;
  }

  /**
   * @param totalFiles the totalFiles to set
   */
  public void setTotalFiles(int totalFiles) {
    this.totalFiles = totalFiles;
  }
}
/**
 * Adds a reduce sink that distributes/sorts rows by the destination table's
 * bucket and sort columns when bucketing/sorting enforcement is enabled.
 *
 * If the table's bucket count exceeds the reducer cap, a multi-file spray is
 * configured instead: each reducer writes several bucket files, so very large
 * bucket counts do not require an equally large reducer count. The computed
 * spray parameters are recorded in {@code ctx} for the FileSink.
 *
 * @param dest destination clause name
 * @param input operator producing the rows to write
 * @param qb query block being compiled
 * @param table_desc descriptor of the destination table
 * @param dest_tab destination table
 * @param ctx receives the spray/partition-column settings
 * @return the (possibly extended) operator chain
 */
@SuppressWarnings("nls")
private Operator genBucketingSortingDest(String dest, Operator input, QB qb,
    TableDesc table_desc, Table dest_tab, SortBucketRSCtx ctx) throws SemanticException {
  ArrayList<ExprNodeDesc> partnCols = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> partnColsNoConvert = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> sortCols = new ArrayList<ExprNodeDesc>();
  ArrayList<Integer> sortOrders = new ArrayList<Integer>();

  boolean bucketingEnforced = (dest_tab.getNumBuckets() > 0)
      && conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCEBUCKETING);
  if (bucketingEnforced) {
    partnCols = getPartitionColsFromBucketCols(dest, qb, dest_tab, table_desc, input, true);
    partnColsNoConvert = getPartitionColsFromBucketCols(dest, qb, dest_tab, table_desc, input,
        false);
  }

  boolean sortingEnforced = (dest_tab.getSortCols() != null)
      && (dest_tab.getSortCols().size() > 0)
      && conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCESORTING);
  if (sortingEnforced) {
    sortCols = getSortCols(dest, qb, dest_tab, table_desc, input, true);
    sortOrders = getSortOrders(dest, qb, dest_tab, input);
    // Without enforced bucketing, distribute by the sort columns instead.
    if (!bucketingEnforced) {
      partnCols = sortCols;
      partnColsNoConvert = getSortCols(dest, qb, dest_tab, table_desc, input, false);
    }
  }

  if (!bucketingEnforced && !sortingEnforced) {
    return input;
  }

  // Reducer cap: explicit mapred reducer count wins over MAXREDUCERS.
  int maxReducers = conf.getIntVar(HiveConf.ConfVars.MAXREDUCERS);
  if (conf.getIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS) > 0) {
    maxReducers = conf.getIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS);
  }

  boolean multiFileSpray = false;
  int numFiles = 1;
  int totalFiles = 1;
  int numBuckets = dest_tab.getNumBuckets();
  if (numBuckets > maxReducers) {
    // More buckets than reducers: spray each reducer across several files.
    multiFileSpray = true;
    totalFiles = numBuckets;
    if (totalFiles % maxReducers != 0) {
      // Shrink the reducer count to an even divisor of the file count.
      maxReducers = getReducersBucketing(totalFiles, maxReducers);
    }
    numFiles = totalFiles / maxReducers;
  } else {
    // One reducer per bucket.
    maxReducers = numBuckets;
  }

  input = genReduceSinkPlanForSortingBucketing(dest_tab, input,
      sortCols, sortOrders, partnCols, maxReducers);
  ctx.setMultiFileSpray(multiFileSpray);
  ctx.setNumFiles(numFiles);
  ctx.setPartnCols(partnColsNoConvert);
  ctx.setTotalFiles(totalFiles);
  return input;
}
/**
 * Check for HOLD_DDLTIME hint.
 *
 * @param qb query block whose hints are inspected
 * @return true if HOLD_DDLTIME is set, false otherwise.
 */
private boolean checkHoldDDLTime(QB qb) {
  ASTNode hints = qb.getParseInfo().getHints();
  if (hints == null) {
    return false;
  }
  int hintCount = hints.getChildCount();
  for (int i = 0; i < hintCount; i++) {
    ASTNode hint = (ASTNode) hints.getChild(i);
    // The hint name is the first child of each hint node.
    if (((ASTNode) hint.getChild(0)).getToken().getType() == HiveParser.TOK_HOLD_DDLTIME) {
      return true;
    }
  }
  return false;
}
/**
 * Generates the FileSinkOperator that writes the query result for the given
 * destination clause, together with the move/load work needed to place the
 * data at its final location. Handles four destination kinds: a table, a
 * (possibly dynamic) partition, a local directory and a DFS directory.
 *
 * Side effects: mutates destTableId/idToTableNameMap, outputs,
 * loadTableWork/loadFileWork, fsopToTable and the session lineage state.
 *
 * @param dest destination clause name from the parse info
 * @param qb query block being compiled
 * @param input operator producing the rows to be written
 * @return the FileSinkOperator appended to the plan
 * @throws SemanticException on invalid destinations or metadata errors
 */
@SuppressWarnings("nls")
private Operator genFileSinkPlan(String dest, QB qb, Operator input)
    throws SemanticException {
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  QBMetaData qbm = qb.getMetaData();
  Integer dest_type = qbm.getDestTypeForAlias(dest);
  Table dest_tab = null; // destination table if any
  Partition dest_part = null;// destination partition if any
  Path queryTmpdir = null; // the intermediate destination directory
  Path dest_path = null; // the final destination directory
  TableDesc table_desc = null;
  int currentTableId = 0;
  boolean isLocal = false;
  SortBucketRSCtx rsCtx = new SortBucketRSCtx();
  DynamicPartitionCtx dpCtx = null;
  LoadTableDesc ltd = null;
  boolean holdDDLTime = checkHoldDDLTime(qb);
  ListBucketingCtx lbCtx = null;
  switch (dest_type.intValue()) {
  case QBMetaData.DEST_TABLE: {
    dest_tab = qbm.getDestTableForAlias(dest);
    // Is the user trying to insert into a external tables
    if ((!conf.getBoolVar(HiveConf.ConfVars.HIVE_INSERT_INTO_EXTERNAL_TABLES)) &&
        (dest_tab.getTableType().equals(TableType.EXTERNAL_TABLE))) {
      throw new SemanticException(
          ErrorMsg.INSERT_EXTERNAL_TABLE.getMsg(dest_tab.getTableName()));
    }
    Map<String, String> partSpec = qbm.getPartSpecForAlias(dest);
    dest_path = dest_tab.getPath();
    // If the query here is an INSERT_INTO and the target is an immutable table,
    // verify that our destination is empty before proceeding
    if (dest_tab.isImmutable() &&
        qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),dest_tab.getTableName())){
      try {
        FileSystem fs = dest_path.getFileSystem(conf);
        if (! MetaStoreUtils.isDirEmpty(fs,dest_path)){
          LOG.warn("Attempted write into an immutable table : "
              + dest_tab.getTableName() + " : " + dest_path);
          throw new SemanticException(
              ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(dest_tab.getTableName()));
        }
      } catch (IOException ioe) {
        // Fail closed: if we cannot check emptiness, refuse the write.
        LOG.warn("Error while trying to determine if immutable table has any data : "
            + dest_tab.getTableName() + " : " + dest_path);
        throw new SemanticException(ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(ioe.getMessage()));
      }
    }
    // check for partition
    List<FieldSchema> parts = dest_tab.getPartitionKeys();
    if (parts != null && parts.size() > 0) { // table is partitioned
      if (partSpec == null || partSpec.size() == 0) { // user did NOT specify partition
        throw new SemanticException(generateErrorMessage(
            qb.getParseInfo().getDestForClause(dest),
            ErrorMsg.NEED_PARTITION_ERROR.getMsg()));
      }
      // the HOLD_DDLTIIME hint should not be used with dynamic partition since the
      // newly generated partitions should always update their DDLTIME
      if (holdDDLTime) {
        throw new SemanticException(generateErrorMessage(
            qb.getParseInfo().getDestForClause(dest),
            ErrorMsg.HOLD_DDLTIME_ON_NONEXIST_PARTITIONS.getMsg()));
      }
      dpCtx = qbm.getDPCtx(dest);
      if (dpCtx == null) {
        // First time seeing this destination: build and cache the DP context.
        dest_tab.validatePartColumnNames(partSpec, false);
        dpCtx = new DynamicPartitionCtx(dest_tab, partSpec,
            conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME),
            conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTSPERNODE));
        qbm.setDPCtx(dest, dpCtx);
      }
      if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING)) { // allow DP
        throw new SemanticException(generateErrorMessage(
            qb.getParseInfo().getDestForClause(dest),
            ErrorMsg.DYNAMIC_PARTITION_DISABLED.getMsg()));
      }
      if (dpCtx.getSPPath() != null) {
        // Static-partition prefix becomes part of the destination path.
        dest_path = new Path(dest_tab.getPath(), dpCtx.getSPPath());
      }
      if ((dest_tab.getNumBuckets() > 0) &&
          (conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCEBUCKETING))) {
        dpCtx.setNumBuckets(dest_tab.getNumBuckets());
      }
    }
    boolean isNonNativeTable = dest_tab.isNonNative();
    if (isNonNativeTable) {
      // Non-native (storage-handler) tables are written in place; no move step.
      queryTmpdir = dest_path;
    } else {
      // if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
      // to final /user/hive/warehouse/ will fail later, so instead pick tmp dir
      // on same namespace as tbl dir.
      queryTmpdir = dest_path.toUri().getScheme().equals("viewfs") ?
          ctx.getExtTmpPathRelTo(dest_path.getParent().toUri()) :
          ctx.getExternalTmpPath(dest_path.toUri());
    }
    if (dpCtx != null) {
      // set the root of the temporary path where dynamic partition columns will populate
      dpCtx.setRootPath(queryTmpdir);
    }
    // this table_desc does not contain the partitioning columns
    table_desc = Utilities.getTableDesc(dest_tab);

    // Add sorting/bucketing if needed
    input = genBucketingSortingDest(dest, input, qb, table_desc, dest_tab, rsCtx);

    idToTableNameMap.put(String.valueOf(destTableId), dest_tab.getTableName());
    currentTableId = destTableId;
    destTableId++;

    lbCtx = constructListBucketingCtx(dest_tab.getSkewedColNames(),
        dest_tab.getSkewedColValues(), dest_tab.getSkewedColValueLocationMaps(),
        dest_tab.isStoredAsSubDirectories(), conf);

    // Create the work for moving the table
    // NOTE: specify Dynamic partitions in dest_tab for WriteEntity
    if (!isNonNativeTable) {
      ltd = new LoadTableDesc(queryTmpdir,table_desc, dpCtx);
      // INSERT INTO keeps existing data; anything else is an overwrite.
      ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
          dest_tab.getTableName()));
      ltd.setLbCtx(lbCtx);

      if (holdDDLTime) {
        LOG.info("this query will not update transient_lastDdlTime!");
        ltd.setHoldDDLTime(true);
      }
      loadTableWork.add(ltd);
    }

    WriteEntity output = null;

    // Here only register the whole table for post-exec hook if no DP present
    // in the case of DP, we will register WriteEntity in MoveTask when the
    // list of dynamically created partitions are known.
    if ((dpCtx == null || dpCtx.getNumDPCols() == 0)) {
      output = new WriteEntity(dest_tab, determineWriteType(ltd, isNonNativeTable));
      if (!outputs.add(output)) {
        throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
            .getMsg(dest_tab.getTableName()));
      }
    }
    if ((dpCtx != null) && (dpCtx.getNumDPCols() >= 0)) {
      // No static partition specified
      if (dpCtx.getNumSPCols() == 0) {
        output = new WriteEntity(dest_tab, determineWriteType(ltd, isNonNativeTable), false);
        outputs.add(output);
      }
      // part of the partition specified
      // Create a DummyPartition in this case. Since, the metastore does not store partial
      // partitions currently, we need to store dummy partitions
      else {
        try {
          String ppath = dpCtx.getSPPath();
          // Drop the trailing path separator from the static-partition prefix.
          ppath = ppath.substring(0, ppath.length() - 1);
          DummyPartition p =
              new DummyPartition(dest_tab, dest_tab.getDbName()
                  + "@" + dest_tab.getTableName() + "@" + ppath,
                  partSpec);
          output = new WriteEntity(p, WriteEntity.WriteType.INSERT, false);
          outputs.add(output);
        } catch (HiveException e) {
          throw new SemanticException(e.getMessage(), e);
        }
      }
    }

    ctx.getLoadTableOutputMap().put(ltd, output);
    break;
  }
  case QBMetaData.DEST_PARTITION: {
    dest_part = qbm.getDestPartitionForAlias(dest);
    dest_tab = dest_part.getTable();
    if ((!conf.getBoolVar(HiveConf.ConfVars.HIVE_INSERT_INTO_EXTERNAL_TABLES)) &&
        dest_tab.getTableType().equals(TableType.EXTERNAL_TABLE)) {
      throw new SemanticException(
          ErrorMsg.INSERT_EXTERNAL_TABLE.getMsg(dest_tab.getTableName()));
    }

    Path tabPath = dest_tab.getPath();
    Path partPath = dest_part.getDataLocation();

    // If the query here is an INSERT_INTO and the target is an immutable table,
    // verify that our destination is empty before proceeding
    if (dest_tab.isImmutable() &&
        qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),dest_tab.getTableName())){
      // NOTE(review): the return value of this call is discarded; it looks
      // vestigial — confirm it has no needed side effects before removing.
      qb.getParseInfo().isInsertToTable();
      try {
        FileSystem fs = partPath.getFileSystem(conf);
        if (! MetaStoreUtils.isDirEmpty(fs,partPath)){
          LOG.warn("Attempted write into an immutable table partition : "
              + dest_tab.getTableName() + " : " + partPath);
          throw new SemanticException(
              ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(dest_tab.getTableName()));
        }
      } catch (IOException ioe) {
        // Fail closed: if we cannot check emptiness, refuse the write.
        LOG.warn("Error while trying to determine if immutable table partition has any data : "
            + dest_tab.getTableName() + " : " + partPath);
        throw new SemanticException(ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(ioe.getMessage()));
      }
    }

    // if the table is in a different dfs than the partition,
    // replace the partition's dfs with the table's dfs.
    dest_path = new Path(tabPath.toUri().getScheme(), tabPath.toUri()
        .getAuthority(), partPath.toUri().getPath());

    // if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
    // to final /user/hive/warehouse/ will fail later, so instead pick tmp dir
    // on same namespace as tbl dir.
    queryTmpdir = dest_path.toUri().getScheme().equals("viewfs") ?
        ctx.getExtTmpPathRelTo(dest_path.getParent().toUri()) :
        ctx.getExternalTmpPath(dest_path.toUri());
    table_desc = Utilities.getTableDesc(dest_tab);

    // Add sorting/bucketing if needed
    input = genBucketingSortingDest(dest, input, qb, table_desc, dest_tab, rsCtx);

    idToTableNameMap.put(String.valueOf(destTableId), dest_tab.getTableName());
    currentTableId = destTableId;
    destTableId++;

    lbCtx = constructListBucketingCtx(dest_part.getSkewedColNames(),
        dest_part.getSkewedColValues(), dest_part.getSkewedColValueLocationMaps(),
        dest_part.isStoredAsSubDirectories(), conf);
    ltd = new LoadTableDesc(queryTmpdir, table_desc, dest_part.getSpec());
    // INSERT INTO keeps existing data; anything else is an overwrite.
    ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
        dest_tab.getTableName()));
    ltd.setLbCtx(lbCtx);

    if (holdDDLTime) {
      try {
        // HOLD_DDLTIME requires the target partition to already exist.
        Partition part = db.getPartition(dest_tab, dest_part.getSpec(), false);
        if (part == null) {
          throw new SemanticException(generateErrorMessage(
              qb.getParseInfo().getDestForClause(dest),
              ErrorMsg.HOLD_DDLTIME_ON_NONEXIST_PARTITIONS.getMsg()));
        }
      } catch (HiveException e) {
        throw new SemanticException(e);
      }
      LOG.info("this query will not update transient_lastDdlTime!");
      ltd.setHoldDDLTime(true);
    }
    loadTableWork.add(ltd);
    if (!outputs.add(new WriteEntity(dest_part, (ltd.getReplace() ?
        WriteEntity.WriteType.INSERT_OVERWRITE :
        WriteEntity.WriteType.INSERT)))) {
      throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
          .getMsg(dest_tab.getTableName() + "@" + dest_part.getName()));
    }
    break;
  }
  case QBMetaData.DEST_LOCAL_FILE:
    isLocal = true;
    // fall through
  case QBMetaData.DEST_DFS_FILE: {
    dest_path = new Path(qbm.getDestFileForAlias(dest));

    if (isLocal) {
      // for local directory - we always write to map-red intermediate
      // store and then copy to local fs
      queryTmpdir = ctx.getMRTmpPath();
    } else {
      // otherwise write to the file system implied by the directory
      // no copy is required. we may want to revisit this policy in future
      try {
        Path qPath = FileUtils.makeQualified(dest_path, conf);
        queryTmpdir = ctx.getExternalTmpPath(qPath.toUri());
      } catch (Exception e) {
        throw new SemanticException("Error creating temporary folder on: "
            + dest_path, e);
      }
    }
    // Column names / types accumulated as "a,b,c" and "t1:t2:t3" strings.
    String cols = "";
    String colTypes = "";
    ArrayList<ColumnInfo> colInfos = inputRR.getColumnInfos();

    // CTAS case: the file output format and serde are defined by the create
    // table command
    // rather than taking the default value
    List<FieldSchema> field_schemas = null;
    CreateTableDesc tblDesc = qb.getTableDesc();
    if (tblDesc != null) {
      field_schemas = new ArrayList<FieldSchema>();
    }

    boolean first = true;
    for (ColumnInfo colInfo : colInfos) {
      String[] nm = inputRR.reverseLookup(colInfo.getInternalName());

      if (nm[1] != null) { // non-null column alias
        colInfo.setAlias(nm[1]);
      }

      String colName = colInfo.getInternalName();  //default column name
      if (field_schemas != null) {
        FieldSchema col = new FieldSchema();
        if (!("".equals(nm[0])) && nm[1] != null) {
          colName = unescapeIdentifier(colInfo.getAlias()).toLowerCase(); // remove ``
        }
        col.setName(colName);; // NOTE(review): stray empty statement (double ';') — harmless
        col.setType(colInfo.getType().getTypeName());
        field_schemas.add(col);
      }

      if (!first) {
        cols = cols.concat(",");
        colTypes = colTypes.concat(":");
      }

      first = false;
      cols = cols.concat(colName);

      // Replace VOID type with string when the output is a temp table or
      // local files.
      // A VOID type can be generated under the query:
      //
      // select NULL from tt;
      // or
      // insert overwrite local directory "abc" select NULL from tt;
      //
      // where there is no column type to which the NULL value should be
      // converted.
      //
      String tName = colInfo.getType().getTypeName();
      if (tName.equals(serdeConstants.VOID_TYPE_NAME)) {
        colTypes = colTypes.concat(serdeConstants.STRING_TYPE_NAME);
      } else {
        colTypes = colTypes.concat(tName);
      }
    }

    // update the create table descriptor with the resulting schema.
    if (tblDesc != null) {
      tblDesc.setCols(new ArrayList<FieldSchema>(field_schemas));
    }

    boolean isDestTempFile = true;
    if (!ctx.isMRTmpFileURI(dest_path.toUri().toString())) {
      // Real (non-intermediate) destination: register it with a table id.
      idToTableNameMap.put(String.valueOf(destTableId), dest_path.toUri().toString());
      currentTableId = destTableId;
      destTableId++;
      isDestTempFile = false;
    }

    boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
    loadFileWork.add(new LoadFileDesc(tblDesc, queryTmpdir, dest_path, isDfsDir, cols,
        colTypes));

    if (tblDesc == null) {
      if (qb.getIsQuery()) {
        String fileFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
        table_desc = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, fileFormat);
      } else {
        table_desc = PlanUtils.getDefaultTableDesc(qb.getLLocalDirectoryDesc(), cols, colTypes);
      }
    } else {
      table_desc = PlanUtils.getTableDesc(tblDesc, cols, colTypes);
    }

    if (!outputs.add(new WriteEntity(dest_path, !isDfsDir, isDestTempFile))) {
      throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
          .getMsg(dest_path.toUri().toString()));
    }
    break;
  }
  default:
    throw new SemanticException("Unknown destination type: " + dest_type);
  }

  // Insert any casts needed to match the destination column types.
  input = genConversionSelectOperator(dest, qb, input, table_desc, dpCtx);
  inputRR = opParseCtx.get(input).getRowResolver();

  // Build the FileSink's row schema from the destination serde's inspector.
  ArrayList<ColumnInfo> vecCol = new ArrayList<ColumnInfo>();

  try {
    StructObjectInspector rowObjectInspector = (StructObjectInspector) table_desc
        .getDeserializer().getObjectInspector();
    List<? extends StructField> fields = rowObjectInspector
        .getAllStructFieldRefs();
    for (int i = 0; i < fields.size(); i++) {
      vecCol.add(new ColumnInfo(fields.get(i).getFieldName(), TypeInfoUtils
          .getTypeInfoFromObjectInspector(fields.get(i)
              .getFieldObjectInspector()), "", false));
    }
  } catch (Exception e) {
    throw new SemanticException(e.getMessage(), e);
  }

  RowSchema fsRS = new RowSchema(vecCol);

  // The output files of a FileSink can be merged if they are either not being written to a table
  // or are being written to a table which is either not bucketed or enforce bucketing is not set
  // and table the table is either not sorted or enforce sorting is not set
  boolean canBeMerged = (dest_tab == null || !((dest_tab.getNumBuckets() > 0 &&
      conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCEBUCKETING)) ||
      (dest_tab.getSortCols() != null && dest_tab.getSortCols().size() > 0 &&
      conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCESORTING))));

  FileSinkDesc fileSinkDesc = new FileSinkDesc(
      queryTmpdir,
      table_desc,
      conf.getBoolVar(HiveConf.ConfVars.COMPRESSRESULT),
      currentTableId,
      rsCtx.isMultiFileSpray(),
      canBeMerged,
      rsCtx.getNumFiles(),
      rsCtx.getTotalFiles(),
      rsCtx.getPartnCols(),
      dpCtx);

  /* Set List Bucketing context. */
  if (lbCtx != null) {
    lbCtx.processRowSkewedIndex(fsRS);
    lbCtx.calculateSkewedValueSubDirList();
  }
  fileSinkDesc.setLbCtx(lbCtx);

  // set it in plan instead of runtime in FileSinkOperator
  fileSinkDesc.setStatsCollectRawDataSize(HiveConf.getBoolVar(conf,
      HiveConf.ConfVars.HIVE_STATS_COLLECT_RAWDATASIZE));

  // set the stats publishing/aggregating key prefix
  // the same as directory name. The directory name
  // can be changed in the optimizer but the key should not be changed
  // it should be the same as the MoveWork's sourceDir.
  fileSinkDesc.setStatsAggPrefix(fileSinkDesc.getDirName().toString());
  if (HiveConf.getVar(conf, HIVESTATSDBCLASS).equalsIgnoreCase(StatDB.fs.name())) {
    String statsTmpLoc = ctx.getExternalTmpPath(queryTmpdir.toUri()).toString();
    LOG.info("Set stats collection dir : " + statsTmpLoc);
    conf.set(StatsSetupConst.STATS_TMP_LOC, statsTmpLoc);
  }

  if (dest_part != null) {
    try {
      String staticSpec = Warehouse.makePartPath(dest_part.getSpec());
      fileSinkDesc.setStaticSpec(staticSpec);
    } catch (MetaException e) {
      throw new SemanticException(e);
    }
  } else if (dpCtx != null) {
    fileSinkDesc.setStaticSpec(dpCtx.getSPPath());
  }

  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(fileSinkDesc,
      fsRS, input), inputRR);

  // Record lineage from the load source directory to this FileSink.
  if (ltd != null && SessionState.get() != null) {
    SessionState.get().getLineageState()
        .mapDirToFop(ltd.getSourcePath(), (FileSinkOperator) output);
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("Created FileSink Plan for clause: " + dest + "dest_path: "
        + dest_path + " row schema: " + inputRR.toString());
  }

  fsopToTable.put((FileSinkOperator) output, dest_tab);
  return output;
}
/**
 * Generate the conversion SelectOperator that converts the columns into the
 * types that are expected by the table_desc.
 *
 * Verifies that the query's column count matches the destination's (plus any
 * dynamic-partition columns), inserts casts where the row type differs from
 * the table column type, and returns the input unchanged when no conversion
 * is needed.
 *
 * @param dest destination clause name (used for error messages)
 * @param qb query block being compiled
 * @param input operator producing the rows
 * @param table_desc descriptor of the destination (table, partition or file)
 * @param dpCtx dynamic-partition context, or null if not a DP insert
 * @return the input operator, possibly topped by a converting Select
 * @throws SemanticException on column count mismatch or impossible conversion
 */
Operator genConversionSelectOperator(String dest, QB qb, Operator input,
    TableDesc table_desc, DynamicPartitionCtx dpCtx) throws SemanticException {
  StructObjectInspector oi = null;
  try {
    // Instantiate the destination serde to learn the table-side column types.
    Deserializer deserializer = table_desc.getDeserializerClass()
        .newInstance();
    SerDeUtils.initializeSerDe(deserializer, conf, table_desc.getProperties(), null);
    oi = (StructObjectInspector) deserializer.getObjectInspector();
  } catch (Exception e) {
    throw new SemanticException(e);
  }

  // Check column number
  List<? extends StructField> tableFields = oi.getAllStructFieldRefs();
  boolean dynPart = HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING);
  ArrayList<ColumnInfo> rowFields = opParseCtx.get(input).getRowResolver()
      .getColumnInfos();
  int inColumnCnt = rowFields.size();
  int outColumnCnt = tableFields.size();
  if (dynPart && dpCtx != null) {
    // DP columns are appended after the regular table columns.
    outColumnCnt += dpCtx.getNumDPCols();
  }

  if (inColumnCnt != outColumnCnt) {
    String reason = "Table " + dest + " has " + outColumnCnt
        + " columns, but query has " + inColumnCnt + " columns.";
    throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH.getMsg(
        qb.getParseInfo().getDestForClause(dest), reason));
  } else if (dynPart && dpCtx != null) {
    // create the mapping from input ExprNode to dest table DP column
    dpCtx.mapInputToDP(rowFields.subList(tableFields.size(), rowFields.size()));
  }

  // Check column types
  boolean converted = false;
  int columnNumber = tableFields.size();
  ArrayList<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(
      columnNumber);
  // MetadataTypedColumnsetSerDe does not need type conversions because it
  // does the conversion to String by itself.
  boolean isMetaDataSerDe = table_desc.getDeserializerClass().equals(
      MetadataTypedColumnsetSerDe.class);
  boolean isLazySimpleSerDe = table_desc.getDeserializerClass().equals(
      LazySimpleSerDe.class);
  if (!isMetaDataSerDe) {

    // here only deals with non-partition columns. We deal with partition columns next
    for (int i = 0; i < columnNumber; i++) {
      ObjectInspector tableFieldOI = tableFields.get(i)
          .getFieldObjectInspector();
      TypeInfo tableFieldTypeInfo = TypeInfoUtils
          .getTypeInfoFromObjectInspector(tableFieldOI);
      TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
      ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
          rowFields.get(i).getInternalName(), "", false, rowFields.get(i).isSkewedCol());
      // LazySimpleSerDe can convert any types to String type using
      // JSON-format.
      if (!tableFieldTypeInfo.equals(rowFieldTypeInfo)
          && !(isLazySimpleSerDe
          && tableFieldTypeInfo.getCategory().equals(Category.PRIMITIVE) && tableFieldTypeInfo
          .equals(TypeInfoFactory.stringTypeInfo))) {
        // need to do some conversions here
        converted = true;
        if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
          // cannot convert to complex types
          column = null;
        } else {
          column = ParseUtils.createConversionCast(
              column, (PrimitiveTypeInfo)tableFieldTypeInfo);
        }
        if (column == null) {
          String reason = "Cannot convert column " + i + " from "
              + rowFieldTypeInfo + " to " + tableFieldTypeInfo + ".";
          throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH
              .getMsg(qb.getParseInfo().getDestForClause(dest), reason));
        }
      }
      expressions.add(column);
    }
  }

  // deal with dynamic partition columns: convert ExprNodeDesc type to String??
  if (dynPart && dpCtx != null && dpCtx.getNumDPCols() > 0) {
    // DP columns starts with tableFields.size()
    for (int i = tableFields.size(); i < rowFields.size(); ++i) {
      TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
      ExprNodeDesc column = new ExprNodeColumnDesc(
          rowFieldTypeInfo, rowFields.get(i).getInternalName(), "", false);
      expressions.add(column);
    }
    // converted = true; // [TODO]: should we check & convert type to String and set it to true?
  }

  if (converted) {
    // add the select operator
    RowResolver rowResolver = new RowResolver();
    ArrayList<String> colName = new ArrayList<String>();
    for (int i = 0; i < expressions.size(); i++) {
      String name = getColumnInternalName(i);
      rowResolver.put("", name, new ColumnInfo(name, expressions.get(i)
          .getTypeInfo(), "", false));
      colName.add(name);
    }
    Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
        new SelectDesc(expressions, colName), new RowSchema(rowResolver
            .getColumnInfos()), input), rowResolver);

    return output;
  } else {
    // not converted
    return input;
  }
}
/**
 * Puts a LimitOperator on top of the input, keeping the input's row schema.
 * The LimitDesc is also recorded in the global limit context as the most
 * recent reduce-side limit.
 *
 * @param dest destination clause name (used only for logging)
 * @param qb query block being compiled
 * @param input operator to cap
 * @param limit maximum number of rows to pass through
 * @return the new LimitOperator
 */
@SuppressWarnings("nls")
private Operator genLimitPlan(String dest, QB qb, Operator input, int limit)
    throws SemanticException {
  RowResolver rr = opParseCtx.get(input).getRowResolver();

  LimitDesc limitDesc = new LimitDesc(limit);
  globalLimitCtx.setLastReduceLimitDesc(limitDesc);

  Operator limitOp = putOpInsertMap(
      OperatorFactory.getAndMakeChild(limitDesc, new RowSchema(rr.getColumnInfos()), input),
      rr);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Created LimitOperator Plan for clause: " + dest
        + " row schema: " + rr.toString());
  }

  return limitOp;
}
/**
 * Builds the UDTF operator for a table-generating function.
 *
 * Rejects query blocks that combine the UDTF with GROUP BY / DISTRIBUTE BY /
 * SORT BY / CLUSTER BY or lateral views, initializes the UDTF against the
 * input row's ObjectInspector, reconciles the AS-clause aliases with the
 * UDTF's output columns, and appends a UDTFOperator to the plan.
 *
 * @param genericUDTF the table-generating function
 * @param outputTableAlias alias under which the UDTF output is resolved
 * @param colAliases user-supplied column aliases (filled in from the UDTF's
 *        output when empty)
 * @param qb query block being compiled
 * @param input operator feeding the UDTF
 * @param outerLV whether this is an OUTER lateral view
 * @return the new UDTFOperator
 * @throws SemanticException on disallowed clauses or alias-count mismatch
 */
private Operator genUDTFPlan(GenericUDTF genericUDTF,
    String outputTableAlias, ArrayList<String> colAliases, QB qb,
    Operator input, boolean outerLV) throws SemanticException {

  // A UDTF cannot share the query block with any of these clauses.
  QBParseInfo qbp = qb.getParseInfo();
  if (!qbp.getDestToGroupBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_GROUP_BY.getMsg());
  }
  if (!qbp.getDestToDistributeBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_DISTRIBUTE_BY.getMsg());
  }
  if (!qbp.getDestToSortBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_SORT_BY.getMsg());
  }
  if (!qbp.getDestToClusterBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg());
  }
  if (!qbp.getAliasToLateralViews().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_LATERAL_VIEW.getMsg());
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("Table alias: " + outputTableAlias + " Col aliases: "
        + colAliases);
  }

  // Describe the input row to the UDTF via a struct ObjectInspector built
  // from the input operator's RowResolver.
  RowResolver selectRR = opParseCtx.get(input).getRowResolver();
  ArrayList<ColumnInfo> inputCols = selectRR.getColumnInfos();

  ArrayList<String> inputColNames = new ArrayList<String>();
  ObjectInspector[] inputColOIs = new ObjectInspector[inputCols.size()];
  int idx = 0;
  for (ColumnInfo ci : inputCols) {
    inputColNames.add(ci.getInternalName());
    inputColOIs[idx++] = ci.getObjectInspector();
  }
  StandardStructObjectInspector rowOI = ObjectInspectorFactory
      .getStandardStructObjectInspector(inputColNames, Arrays.asList(inputColOIs));
  StructObjectInspector outputOI = genericUDTF.initialize(rowOI);

  int numUdtfCols = outputOI.getAllStructFieldRefs().size();
  if (colAliases.isEmpty()) {
    // user did not specify alias names; infer them from the UDTF output.
    for (StructField field : outputOI.getAllStructFieldRefs()) {
      colAliases.add(field.getFieldName());
    }
  }

  // The AS clause must name exactly as many columns as the UDTF emits.
  int numSuppliedAliases = colAliases.size();
  if (numUdtfCols != numSuppliedAliases) {
    throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH
        .getMsg("expected " + numUdtfCols + " aliases " + "but got "
            + numSuppliedAliases));
  }

  // Pair each UDTF output field with its alias in the output RowResolver.
  // The downstream LVJ operator renames internal names, so the UDTF's own
  // field names are safe to use as internal names here.
  RowResolver out_rwsch = new RowResolver();
  Iterator<String> aliasIter = colAliases.iterator();
  for (StructField sf : outputOI.getAllStructFieldRefs()) {
    String colAlias = aliasIter.next();
    assert (colAlias != null);
    ColumnInfo col = new ColumnInfo(sf.getFieldName(),
        TypeInfoUtils.getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()),
        outputTableAlias, false);
    out_rwsch.put(outputTableAlias, colAlias, col);
  }

  // Add the UDTFOperator to the operator DAG.
  return putOpInsertMap(OperatorFactory.getAndMakeChild(
      new UDTFDesc(genericUDTF, outerLV), new RowSchema(out_rwsch.getColumnInfos()),
      input), out_rwsch);
}
/**
 * Generates the LIMIT plan for a destination clause: a limit on the current
 * stage and, when requested, an extra single-reducer map-reduce step with a
 * second limit so the cap is global rather than per-task.
 *
 * @param dest destination clause name
 * @param qb query block being compiled
 * @param input operator to cap
 * @param limit maximum number of rows
 * @param extraMRStep whether to add the single-reducer re-limit step
 * @return the final LimitOperator
 */
@SuppressWarnings("nls")
private Operator genLimitMapRedPlan(String dest, QB qb, Operator input,
    int limit, boolean extraMRStep) throws SemanticException {
  Operator limited = genLimitPlan(dest, qb, input, limit);

  if (!extraMRStep) {
    // Caller did not request the extra map-reduce step.
    return limited;
  }

  // Funnel everything through one reducer and re-apply the limit there.
  Operator singleReducer = genReduceSinkPlan(dest, qb, limited, 1);
  return genLimitPlan(dest, qb, singleReducer, limit);
}
/**
 * Resolves the destination table's bucket columns to their positions in the
 * table schema and returns the matching (optionally type-converted)
 * expressions, for use as reduce-sink partition columns.
 *
 * @param dest destination clause name (used only for error messages)
 * @param qb query block being compiled
 * @param tab destination table
 * @param table_desc descriptor of the destination table
 * @param input operator supplying the rows
 * @param convert whether to cast row columns to the table's column types
 * @return one expression per bucket column, in bucket-column order
 */
private ArrayList<ExprNodeDesc> getPartitionColsFromBucketCols(String dest, QB qb, Table tab,
    TableDesc table_desc, Operator input, boolean convert)
    throws SemanticException {
  List<FieldSchema> schema = tab.getCols();
  List<Integer> positions = new ArrayList<Integer>();
  // Locate each bucket column by name within the table schema.
  for (String bucketCol : tab.getBucketCols()) {
    for (int idx = 0; idx < schema.size(); idx++) {
      if (bucketCol.equals(schema.get(idx).getName())) {
        positions.add(idx);
        break;
      }
    }
  }
  return genConvertCol(dest, qb, tab, table_desc, input, positions, convert);
}
/**
 * Builds column expressions for the given schema positions, casting each row
 * column to the destination table's column type when requested.
 *
 * @param dest destination clause name (used only for error messages)
 * @param qb query block being compiled
 * @param tab destination table (unused directly; kept for symmetry)
 * @param table_desc descriptor whose serde defines the table-side types
 * @param input operator supplying the rows
 * @param posns schema positions of the columns to emit
 * @param convert whether to cast to the table-side type on mismatch
 * @return one expression per requested position, in order
 * @throws SemanticException if a required conversion is impossible
 */
private ArrayList<ExprNodeDesc> genConvertCol(String dest, QB qb, Table tab,
    TableDesc table_desc, Operator input, List<Integer> posns, boolean convert)
    throws SemanticException {
  // Instantiate the destination serde to learn the table-side column types.
  StructObjectInspector tableOI;
  try {
    Deserializer deserializer = table_desc.getDeserializerClass().newInstance();
    SerDeUtils.initializeSerDe(deserializer, conf, table_desc.getProperties(), null);
    tableOI = (StructObjectInspector) deserializer.getObjectInspector();
  } catch (Exception e) {
    throw new SemanticException(e);
  }

  List<? extends StructField> tableFields = tableOI.getAllStructFieldRefs();
  ArrayList<ColumnInfo> rowFields = opParseCtx.get(input).getRowResolver()
      .getColumnInfos();

  ArrayList<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(posns.size());
  for (Integer posn : posns) {
    TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(
        tableFields.get(posn).getFieldObjectInspector());
    ColumnInfo rowField = rowFields.get(posn);
    TypeInfo rowFieldTypeInfo = rowField.getType();
    ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
        rowField.getInternalName(), rowField.getTabAlias(),
        rowField.getIsVirtualCol());
    if (convert && !tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
      if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
        // complex types cannot be converted
        column = null;
      } else {
        column = ParseUtils.createConversionCast(
            column, (PrimitiveTypeInfo) tableFieldTypeInfo);
      }
      if (column == null) {
        String reason = "Cannot convert column " + posn + " from "
            + rowFieldTypeInfo + " to " + tableFieldTypeInfo + ".";
        throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH
            .getMsg(qb.getParseInfo().getDestForClause(dest), reason));
      }
    }
    expressions.add(column);
  }
  return expressions;
}
/**
 * Builds the sort-key expressions for an insert into a sorted table: locates each
 * declared SORTED BY column in the table schema and delegates to
 * {@link #genConvertCol} to produce (optionally cast) column expressions.
 *
 * @param dest destination clause name (for error reporting in genConvertCol)
 * @param qb query block (for error reporting in genConvertCol)
 * @param tab target table whose sort columns are consulted
 * @param table_desc descriptor of the target table
 * @param input operator producing the rows to be written
 * @param convert whether to cast expressions to the table's declared column types
 * @return one expression per declared sort column, in declaration order
 * @throws SemanticException propagated from genConvertCol
 */
private ArrayList<ExprNodeDesc> getSortCols(String dest, QB qb, Table tab, TableDesc table_desc,
    Operator input, boolean convert)
    throws SemanticException {
  List<Order> tabSortCols = tab.getSortCols();
  List<FieldSchema> tabCols = tab.getCols();
  // Map each declared sort column to its position in the table schema.
  // (Columns not found in the schema are silently skipped.)
  List<Integer> posns = new ArrayList<Integer>();
  for (Order sortCol : tabSortCols) {
    int pos = 0;
    for (FieldSchema tabCol : tabCols) {
      if (sortCol.getCol().equals(tabCol.getName())) {
        posns.add(pos);
        break;
      }
      pos++;
    }
  }
  return genConvertCol(dest, qb, tab, table_desc, input, posns, convert);
}
/**
 * Returns the sort direction of each SORTED BY column of the table, in declaration
 * order, as the integer order codes stored on the {@link Order} metadata objects.
 *
 * @param dest destination clause name (unused; kept for signature symmetry)
 * @param qb query block (unused; kept for signature symmetry)
 * @param tab table whose sort-column orders are read
 * @param input operator feeding the insert (unused; kept for signature symmetry)
 * @return order code per sort column that exists in the table schema
 * @throws SemanticException declared for symmetry with sibling helpers
 */
private ArrayList<Integer> getSortOrders(String dest, QB qb, Table tab, Operator input)
    throws SemanticException {
  List<Order> tabSortCols = tab.getSortCols();
  List<FieldSchema> tabCols = tab.getCols();
  ArrayList<Integer> orders = new ArrayList<Integer>();
  for (Order sortCol : tabSortCols) {
    // Only record an order for sort columns actually present in the schema.
    for (FieldSchema tabCol : tabCols) {
      if (sortCol.getCol().equals(tabCol.getName())) {
        orders.add(sortCol.getOrder());
        break;
      }
    }
  }
  return orders;
}
/**
 * Inserts a ReduceSink (to partition by the bucketing columns and sort by the sort
 * columns) followed by an Extract operator, used to enforce bucketing/sorting on
 * an insert into a bucketed/sorted table.
 *
 * @param tab target table (used only for the debug log message)
 * @param input operator producing the rows to be written
 * @param sortCols expressions to sort by (become the reduce keys)
 * @param sortOrders order code per sort column; ASC maps to '+', otherwise '-'
 * @param partitionCols expressions to partition the shuffle by
 * @param numReducers requested reducer count (-1 lets the engine decide)
 * @return the Extract operator that re-exposes the value fields downstream
 * @throws SemanticException on plan-construction failure
 */
@SuppressWarnings("nls")
private Operator genReduceSinkPlanForSortingBucketing(Table tab, Operator input,
ArrayList<ExprNodeDesc> sortCols,
List<Integer> sortOrders,
ArrayList<ExprNodeDesc> partitionCols,
int numReducers)
throws SemanticException {
RowResolver inputRR = opParseCtx.get(input).getRowResolver();
// For the generation of the values expression just get the inputs
// signature and generate field expressions for those
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
ArrayList<String> outputColumns = new ArrayList<String>();
int i = 0;
for (ColumnInfo colInfo : inputRR.getColumnInfos()) {
String internalName = getColumnInternalName(i++);
outputColumns.add(internalName);
valueCols.add(new ExprNodeColumnDesc(colInfo.getType(), colInfo
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol()));
colExprMap.put(internalName, valueCols
.get(valueCols.size() - 1));
}
// Encode sort directions as a +/- string, one char per sort key.
StringBuilder order = new StringBuilder();
for (int sortOrder : sortOrders) {
order.append(sortOrder == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC ? '+' : '-');
}
Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
.getReduceSinkDesc(sortCols, valueCols, outputColumns, false, -1,
partitionCols, order.toString(), numReducers),
new RowSchema(inputRR.getColumnInfos()), input), inputRR);
interim.setColumnExprMap(colExprMap);
// Remember this RS so later optimizer phases know it came from enforce bucketing/sorting.
reduceSinkOperatorsAddedByEnforceBucketingSorting.add((ReduceSinkOperator) interim);
// Add the extract operator to get the value fields
RowResolver out_rwsch = new RowResolver();
RowResolver interim_rwsch = inputRR;
Integer pos = Integer.valueOf(0);
for (ColumnInfo colInfo : interim_rwsch.getColumnInfos()) {
String[] info = interim_rwsch.reverseLookup(colInfo.getInternalName());
out_rwsch.put(info[0], info[1], new ColumnInfo(
getColumnInternalName(pos), colInfo.getType(), info[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol()));
pos = Integer.valueOf(pos.intValue() + 1);
}
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
new ExtractDesc(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema(
out_rwsch.getColumnInfos()), interim), out_rwsch);
if (LOG.isDebugEnabled()) {
LOG.debug("Created ReduceSink Plan for table: " + tab.getTableName() +
" row schema: " + out_rwsch.toString());
}
return output;
}
/**
 * Generates a ReduceSink + Select pair implementing the CLUSTER BY / DISTRIBUTE BY /
 * SORT BY / ORDER BY clauses of the destination {@code dest}.
 *
 * CLUSTER BY supplies both partition and sort expressions; otherwise DISTRIBUTE BY
 * gives the partition keys and SORT BY / ORDER BY the sort keys. Input columns that
 * match a sort key are routed through the RS key section (KEY.reducesinkkeyN); all
 * other columns go through the value section. The trailing Select restores the
 * original column layout/names for downstream operators.
 *
 * @param dest destination clause name within the query block
 * @param qb query block being compiled
 * @param input operator to place the ReduceSink on top of
 * @param numReducers requested reducer count (must be 1 for ORDER BY)
 * @return the Select operator that re-exposes the input schema after the shuffle
 * @throws SemanticException e.g. for ORDER BY without LIMIT in strict mode
 */
@SuppressWarnings("nls")
private Operator genReduceSinkPlan(String dest, QB qb, Operator<?> input,
int numReducers) throws SemanticException {
RowResolver inputRR = opParseCtx.get(input).getRowResolver();
// First generate the expression for the partition and sort keys
// The cluster by clause / distribute by clause has the aliases for
// partition function
ASTNode partitionExprs = qb.getParseInfo().getClusterByForClause(dest);
if (partitionExprs == null) {
partitionExprs = qb.getParseInfo().getDistributeByForClause(dest);
}
ArrayList<ExprNodeDesc> partitionCols = new ArrayList<ExprNodeDesc>();
if (partitionExprs != null) {
int ccount = partitionExprs.getChildCount();
for (int i = 0; i < ccount; ++i) {
ASTNode cl = (ASTNode) partitionExprs.getChild(i);
partitionCols.add(genExprNodeDesc(cl, inputRR));
}
}
// Sort keys come from CLUSTER BY, else SORT BY, else ORDER BY.
ASTNode sortExprs = qb.getParseInfo().getClusterByForClause(dest);
if (sortExprs == null) {
sortExprs = qb.getParseInfo().getSortByForClause(dest);
}
if (sortExprs == null) {
sortExprs = qb.getParseInfo().getOrderByForClause(dest);
if (sortExprs != null) {
assert numReducers == 1;
// in strict mode, in the presence of order by, limit must be specified
Integer limit = qb.getParseInfo().getDestLimit(dest);
if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
"strict")
&& limit == null) {
throw new SemanticException(generateErrorMessage(sortExprs,
ErrorMsg.NO_LIMIT_WITH_ORDERBY.getMsg()));
}
}
}
// Temporary parent used so ExprNodeDescUtils.backtrack can map expressions
// from this (not-yet-created) operator back onto 'input'.
Operator dummy = Operator.createDummy();
dummy.setParentOperators(Arrays.asList(input));
ArrayList<ExprNodeDesc> sortCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> sortColsBack = new ArrayList<ExprNodeDesc>();
StringBuilder order = new StringBuilder();
if (sortExprs != null) {
int ccount = sortExprs.getChildCount();
for (int i = 0; i < ccount; ++i) {
ASTNode cl = (ASTNode) sortExprs.getChild(i);
if (cl.getType() == HiveParser.TOK_TABSORTCOLNAMEASC) {
// SortBy ASC
order.append("+");
cl = (ASTNode) cl.getChild(0);
} else if (cl.getType() == HiveParser.TOK_TABSORTCOLNAMEDESC) {
// SortBy DESC
order.append("-");
cl = (ASTNode) cl.getChild(0);
} else {
// ClusterBy
order.append("+");
}
ExprNodeDesc exprNode = genExprNodeDesc(cl, inputRR);
sortCols.add(exprNode);
sortColsBack.add(ExprNodeDescUtils.backtrack(exprNode, dummy, input));
}
}
// For the generation of the values expression just get the inputs
// signature and generate field expressions for those
RowResolver rsRR = new RowResolver();
ArrayList<String> outputColumns = new ArrayList<String>();
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueColsBack = new ArrayList<ExprNodeDesc>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
ArrayList<ColumnInfo> columnInfos = inputRR.getColumnInfos();
// index[i] >= 0  : input column i is sort key number index[i]
// index[i] < 0   : input column i is value column (-index[i] - 1)
int[] index = new int[columnInfos.size()];
for (int i = 0; i < index.length; i++) {
ColumnInfo colInfo = columnInfos.get(i);
String[] nm = inputRR.reverseLookup(colInfo.getInternalName());
String[] nm2 = inputRR.getAlternateMappings(colInfo.getInternalName());
ExprNodeColumnDesc value = new ExprNodeColumnDesc(colInfo.getType(),
colInfo.getInternalName(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
// backtrack can be null when input is script operator
ExprNodeDesc valueBack = ExprNodeDescUtils.backtrack(value, dummy, input);
int kindex = valueBack == null ? -1 : ExprNodeDescUtils.indexOf(valueBack, sortColsBack);
if (kindex >= 0) {
// Column is one of the sort keys: expose it via the KEY section.
index[i] = kindex;
ColumnInfo newColInfo = new ColumnInfo(colInfo);
newColInfo.setInternalName(Utilities.ReduceField.KEY + ".reducesinkkey" + kindex);
newColInfo.setTabAlias(nm[0]);
rsRR.addMappingOnly(nm[0], nm[1], newColInfo);
if (nm2 != null) {
rsRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
}
continue;
}
int vindex = valueBack == null ? -1 : ExprNodeDescUtils.indexOf(valueBack, valueColsBack);
if (vindex >= 0) {
// Duplicate of an already-emitted value column: reuse its slot.
index[i] = -vindex - 1;
continue;
}
index[i] = -valueCols.size() - 1;
String outputColName = getColumnInternalName(valueCols.size());
valueCols.add(value);
valueColsBack.add(valueBack);
ColumnInfo newColInfo = new ColumnInfo(colInfo);
newColInfo.setInternalName(Utilities.ReduceField.VALUE + "." + outputColName);
newColInfo.setTabAlias(nm[0]);
rsRR.put(nm[0], nm[1], newColInfo);
if (nm2 != null) {
rsRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
}
outputColumns.add(outputColName);
}
// Detach the dummy so it cannot keep 'input' reachable.
dummy.setParentOperators(null);
ReduceSinkDesc rsdesc = PlanUtils.getReduceSinkDesc(sortCols, valueCols, outputColumns,
false, -1, partitionCols, order.toString(), numReducers);
Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(rsdesc,
new RowSchema(rsRR.getColumnInfos()), input), rsRR);
List<String> keyColNames = rsdesc.getOutputKeyColumnNames();
for (int i = 0 ; i < keyColNames.size(); i++) {
colExprMap.put(Utilities.ReduceField.KEY + "." + keyColNames.get(i), sortCols.get(i));
}
List<String> valueColNames = rsdesc.getOutputValueColumnNames();
for (int i = 0 ; i < valueColNames.size(); i++) {
colExprMap.put(Utilities.ReduceField.VALUE + "." + valueColNames.get(i), valueCols.get(i));
}
interim.setColumnExprMap(colExprMap);
// Follow the RS with a Select that restores the original column order/names,
// reading each column from KEY.* or VALUE.* according to index[].
RowResolver selectRR = new RowResolver();
ArrayList<ExprNodeDesc> selCols = new ArrayList<ExprNodeDesc>();
ArrayList<String> selOutputCols = new ArrayList<String>();
Map<String, ExprNodeDesc> selColExprMap = new HashMap<String, ExprNodeDesc>();
for (int i = 0; i < index.length; i++) {
ColumnInfo prev = columnInfos.get(i);
String[] nm = inputRR.reverseLookup(prev.getInternalName());
String[] nm2 = inputRR.getAlternateMappings(prev.getInternalName());
ColumnInfo info = new ColumnInfo(prev);
String field;
if (index[i] >= 0) {
field = Utilities.ReduceField.KEY + "." + keyColNames.get(index[i]);
} else {
field = Utilities.ReduceField.VALUE + "." + valueColNames.get(-index[i] - 1);
}
String internalName = getColumnInternalName(i);
ExprNodeColumnDesc desc = new ExprNodeColumnDesc(info.getType(),
field, info.getTabAlias(), info.getIsVirtualCol());
selCols.add(desc);
info.setInternalName(internalName);
selectRR.put(nm[0], nm[1], info);
if (nm2 != null) {
selectRR.addMappingOnly(nm2[0], nm2[1], info);
}
selOutputCols.add(internalName);
selColExprMap.put(internalName, desc);
}
SelectDesc select = new SelectDesc(selCols, selOutputCols);
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(select,
new RowSchema(selectRR.getColumnInfos()), interim), selectRR);
output.setColumnExprMap(selColExprMap);
return output;
}
/**
 * Creates the JoinOperator on top of the per-source ReduceSink children.
 *
 * For every input RS it maps the parent's columns to KEY.*/VALUE.* references
 * (using the RS's value index), builds the join output row resolver, collects the
 * per-tag value and filter expressions, and finally constructs the JoinDesc /
 * JoinOperator with the join conditions and null-safe flags.
 *
 * @param join join tree describing conditions, filters and null-safes
 * @param left operator used for any position where right[pos] is null
 * @param right one ReduceSink child per join input (entries may be null)
 * @param omitOpts positions whose columns must not appear in the join output
 *        (used for LEFT SEMI JOIN right-hand sides); may be null
 * @return the JoinOperator registered in the operator-parse-context map
 * @throws SemanticException if an RS child does not have exactly one parent
 */
private Operator genJoinOperatorChildren(QBJoinTree join, Operator left,
Operator[] right, HashSet<Integer> omitOpts) throws SemanticException {
RowResolver outputRR = new RowResolver();
ArrayList<String> outputColumnNames = new ArrayList<String>();
// all children are base classes
Operator<?>[] rightOps = new Operator[right.length];
int outputPos = 0;
Map<String, Byte> reversedExprs = new HashMap<String, Byte>();
HashMap<Byte, List<ExprNodeDesc>> exprMap = new HashMap<Byte, List<ExprNodeDesc>>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
HashMap<Integer, Set<String>> posToAliasMap = new HashMap<Integer, Set<String>>();
HashMap<Byte, List<ExprNodeDesc>> filterMap =
new HashMap<Byte, List<ExprNodeDesc>>();
for (int pos = 0; pos < right.length; ++pos) {
Operator<?> input = right[pos] == null ? left : right[pos];
// NOTE(review): this second check looks redundant — 'input' was already
// defaulted to 'left' above, so it only matters if 'left' itself is null.
if (input == null) {
input = left;
}
ReduceSinkOperator rs = (ReduceSinkOperator) input;
if (rs.getNumParent() != 1) {
throw new SemanticException("RS should have single parent");
}
Operator<?> parent = rs.getParentOperators().get(0);
ReduceSinkDesc rsDesc = (ReduceSinkDesc) (input.getConf());
int[] index = rs.getValueIndex();
ArrayList<ExprNodeDesc> valueDesc = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> filterDesc = new ArrayList<ExprNodeDesc>();
Byte tag = (byte) rsDesc.getTag();
// check whether this input operator produces output
if (omitOpts != null && omitOpts.contains(pos)) {
// Semijoin RHS: register empty expression/filter lists, contribute no columns.
exprMap.put(tag, valueDesc);
filterMap.put(tag, filterDesc);
rightOps[pos] = input;
continue;
}
List<String> keyColNames = rsDesc.getOutputKeyColumnNames();
List<String> valColNames = rsDesc.getOutputValueColumnNames();
// prepare output descriptors for the input opt
RowResolver inputRR = opParseCtx.get(input).getRowResolver();
RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
posToAliasMap.put(pos, new HashSet<String>(inputRR.getTableNames()));
List<ColumnInfo> columns = parentRR.getColumnInfos();
for (int i = 0; i < index.length; i++) {
ColumnInfo prev = columns.get(i);
String[] nm = parentRR.reverseLookup(prev.getInternalName());
String[] nm2 = parentRR.getAlternateMappings(prev.getInternalName());
// Skip columns already present in the join output (e.g. shared key columns).
if (outputRR.get(nm[0], nm[1]) != null) {
continue;
}
ColumnInfo info = new ColumnInfo(prev);
String field;
// index[i] >= 0 means the column rides in the RS key section, else in values.
if (index[i] >= 0) {
field = Utilities.ReduceField.KEY + "." + keyColNames.get(index[i]);
} else {
field = Utilities.ReduceField.VALUE + "." + valColNames.get(-index[i] - 1);
}
String internalName = getColumnInternalName(outputColumnNames.size());
ExprNodeColumnDesc desc = new ExprNodeColumnDesc(info.getType(),
field, info.getTabAlias(), info.getIsVirtualCol());
info.setInternalName(internalName);
colExprMap.put(internalName, desc);
outputRR.put(nm[0], nm[1], info);
if (nm2 != null) {
outputRR.addMappingOnly(nm2[0], nm2[1], info);
}
valueDesc.add(desc);
outputColumnNames.add(internalName);
reversedExprs.put(internalName, tag);
}
// Compile this tag's residual join filters against its own input schema.
for (ASTNode cond : join.getFilters().get(tag)) {
filterDesc.add(genExprNodeDesc(cond, inputRR));
}
exprMap.put(tag, valueDesc);
filterMap.put(tag, filterDesc);
rightOps[pos] = input;
}
JoinCondDesc[] joinCondns = new JoinCondDesc[join.getJoinCond().length];
for (int i = 0; i < join.getJoinCond().length; i++) {
JoinCond condn = join.getJoinCond()[i];
joinCondns[i] = new JoinCondDesc(condn);
}
JoinDesc desc = new JoinDesc(exprMap, outputColumnNames,
join.getNoOuterJoin(), joinCondns, filterMap);
desc.setReversedExprs(reversedExprs);
desc.setFilterMap(join.getFilterMap());
JoinOperator joinOp = (JoinOperator) OperatorFactory.getAndMakeChild(desc,
new RowSchema(outputRR.getColumnInfos()), rightOps);
joinOp.setColumnExprMap(colExprMap);
joinOp.setPosToAliasMap(posToAliasMap);
if (join.getNullSafes() != null) {
boolean[] nullsafes = new boolean[join.getNullSafes().size()];
for (int i = 0; i < nullsafes.length; i++) {
nullsafes[i] = join.getNullSafes().get(i);
}
desc.setNullSafes(nullsafes);
}
queryProperties.incrementJoinCount(joinOp.getConf().getNoOuterJoin());
return putOpInsertMap(joinOp, outputRR);
}
/**
 * Compiles the join-key ASTs of every join input into expression descriptors and
 * applies the common type-conversion step so all key lists are comparable.
 *
 * @param joinTree join tree carrying one key-expression list per input
 * @param inputs join input operators, parallel to the expression lists
 * @return per-input arrays of (possibly cast) join-key expressions
 * @throws SemanticException if a key expression cannot be compiled or the key
 *         types admit no common comparison type
 */
private ExprNodeDesc[][] genJoinKeys(QBJoinTree joinTree, Operator[] inputs)
    throws SemanticException {
  ExprNodeDesc[][] joinKeys = new ExprNodeDesc[inputs.length][];
  for (int src = 0; src < inputs.length; src++) {
    RowResolver srcRR = opParseCtx.get(inputs[src]).getRowResolver();
    List<ASTNode> keyExprs = joinTree.getExpressions().get(src);
    ExprNodeDesc[] compiled = new ExprNodeDesc[keyExprs.size()];
    for (int k = 0; k < compiled.length; k++) {
      compiled[k] = genExprNodeDesc(keyExprs.get(k), srcRR);
    }
    joinKeys[src] = compiled;
  }
  // Type checking and implicit type conversion for join keys
  return genJoinOperatorTypeCheck(joinKeys);
}
/**
 * Creates the ReduceSink feeding one input of a join: the join keys become the
 * reduce keys, every other input column becomes a reduce value (deduplicated
 * against both keys and previously-emitted values), and a value-index array is
 * recorded on the RS so {@code genJoinOperatorChildren} can route each column to
 * KEY.* or VALUE.*.
 *
 * @param qb query block being compiled (unused directly; kept for signature parity)
 * @param joinKeys compiled join-key expressions for this input
 * @param child operator this RS is placed on top of
 * @param srcs aliases feeding this input (recorded on the RS)
 * @param tag join tag identifying this input in the downstream JoinOperator
 * @return the configured ReduceSinkOperator
 * @throws SemanticException on cartesian product in strict mode, or plan errors
 */
@SuppressWarnings("nls")
private Operator genJoinReduceSinkChild(QB qb, ExprNodeDesc[] joinKeys,
    Operator<?> child, String[] srcs, int tag) throws SemanticException {
  Operator dummy = Operator.createDummy(); // dummy for backtracking
  dummy.setParentOperators(Arrays.asList(child));
  RowResolver inputRR = opParseCtx.get(child).getRowResolver();
  RowResolver outputRR = new RowResolver();
  ArrayList<String> outputColumns = new ArrayList<String>();
  ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> reduceKeysBack = new ArrayList<ExprNodeDesc>();
  // Compute join keys and store in reduceKeys
  for (ExprNodeDesc joinKey : joinKeys) {
    reduceKeys.add(joinKey);
    reduceKeysBack.add(ExprNodeDescUtils.backtrack(joinKey, dummy, child));
  }
  // Walk over the input row resolver and copy in the output
  ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> reduceValuesBack = new ArrayList<ExprNodeDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  List<ColumnInfo> columns = inputRR.getColumnInfos();
  // index[i] >= 0: column i is reduce key index[i]; index[i] < 0: value (-index[i]-1).
  int[] index = new int[columns.size()];
  for (int i = 0; i < columns.size(); i++) {
    ColumnInfo colInfo = columns.get(i);
    String[] nm = inputRR.reverseLookup(colInfo.getInternalName());
    String[] nm2 = inputRR.getAlternateMappings(colInfo.getInternalName());
    ExprNodeDesc expr = new ExprNodeColumnDesc(colInfo.getType(),
        colInfo.getInternalName(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
    // backtrack can be null when input is script operator
    ExprNodeDesc exprBack = ExprNodeDescUtils.backtrack(expr, dummy, child);
    int kindex = exprBack == null ? -1 : ExprNodeDescUtils.indexOf(exprBack, reduceKeysBack);
    if (kindex >= 0) {
      // Column is one of the join keys: expose it via the KEY section.
      ColumnInfo newColInfo = new ColumnInfo(colInfo);
      newColInfo.setInternalName(Utilities.ReduceField.KEY + ".reducesinkkey" + kindex);
      newColInfo.setTabAlias(nm[0]);
      outputRR.addMappingOnly(nm[0], nm[1], newColInfo);
      if (nm2 != null) {
        outputRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
      }
      index[i] = kindex;
      continue;
    }
    int vindex = exprBack == null ? -1 : ExprNodeDescUtils.indexOf(exprBack, reduceValuesBack);
    // Bug fix: this previously tested "kindex >= 0", which is always false here
    // (the kindex branch above continues), so duplicate value columns were never
    // reused. Mirror the equivalent check in genReduceSinkPlan.
    if (vindex >= 0) {
      index[i] = -vindex - 1;
      continue;
    }
    index[i] = -reduceValues.size() - 1;
    String outputColName = getColumnInternalName(reduceValues.size());
    reduceValues.add(expr);
    reduceValuesBack.add(exprBack);
    ColumnInfo newColInfo = new ColumnInfo(colInfo);
    newColInfo.setInternalName(Utilities.ReduceField.VALUE + "." + outputColName);
    newColInfo.setTabAlias(nm[0]);
    outputRR.put(nm[0], nm[1], newColInfo);
    if (nm2 != null) {
      outputRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
    }
    outputColumns.add(outputColName);
  }
  // Detach the dummy so it does not keep 'child' reachable.
  dummy.setParentOperators(null);
  int numReds = -1;
  // Use only 1 reducer in case of cartesian product
  if (reduceKeys.size() == 0) {
    numReds = 1;
    // Cartesian product is not supported in strict mode
    if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
        "strict")) {
      throw new SemanticException(ErrorMsg.NO_CARTESIAN_PRODUCT.getMsg());
    }
  }
  ReduceSinkDesc rsDesc = PlanUtils.getReduceSinkDesc(reduceKeys,
      reduceValues, outputColumns, false, tag,
      reduceKeys.size(), numReds);
  ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
      OperatorFactory.getAndMakeChild(rsDesc, new RowSchema(outputRR
          .getColumnInfos()), child), outputRR);
  List<String> keyColNames = rsDesc.getOutputKeyColumnNames();
  for (int i = 0 ; i < keyColNames.size(); i++) {
    colExprMap.put(Utilities.ReduceField.KEY + "." + keyColNames.get(i), reduceKeys.get(i));
  }
  List<String> valColNames = rsDesc.getOutputValueColumnNames();
  for (int i = 0 ; i < valColNames.size(); i++) {
    colExprMap.put(Utilities.ReduceField.VALUE + "." + valColNames.get(i), reduceValues.get(i));
  }
  rsOp.setValueIndex(index);
  rsOp.setColumnExprMap(colExprMap);
  rsOp.setInputAliases(srcs);
  return rsOp;
}
/**
 * Recursively generates the operator tree for a (possibly nested) join: builds the
 * left subtree, applies pushable filters, prepares each base source (with the
 * semijoin select/group-by rewrite where needed), adds a not-null filter and a
 * ReduceSink per input, creates the JoinOperator, and finally applies any
 * post-join filters.
 *
 * @param qb query block being compiled
 * @param joinTree join (sub)tree to generate
 * @param map alias -&gt; source operator for all base sources
 * @param joiningOp pre-built left-side JoinOperator to reuse, or null
 * @return the topmost operator (JoinOperator or a post-join FilterOperator)
 * @throws SemanticException on any plan-generation failure
 */
private Operator genJoinOperator(QB qb, QBJoinTree joinTree,
Map<String, Operator> map,
Operator joiningOp) throws SemanticException {
QBJoinTree leftChild = joinTree.getJoinSrc();
Operator joinSrcOp = joiningOp instanceof JoinOperator ? joiningOp : null;
// Recurse into the left join subtree if one exists and none was supplied.
if (joinSrcOp == null && leftChild != null) {
joinSrcOp = genJoinOperator(qb, leftChild, map, null);
}
if ( joinSrcOp != null ) {
// Filters pushable onto the left side live at position 0.
ArrayList<ASTNode> filter = joinTree.getFiltersForPushing().get(0);
for (ASTNode cond : filter) {
joinSrcOp = genFilterPlan(qb, cond, joinSrcOp);
}
}
String[] baseSrc = joinTree.getBaseSrc();
Operator[] srcOps = new Operator[baseSrc.length];
HashSet<Integer> omitOpts = null; // set of input to the join that should be
// omitted by the output
int pos = 0;
for (String src : baseSrc) {
if (src != null) {
Operator srcOp = map.get(src.toLowerCase());
// for left-semi join, generate an additional selection & group-by
// operator before ReduceSink
ArrayList<ASTNode> fields = joinTree.getRHSSemijoinColumns(src);
if (fields != null) {
// the RHS table columns should be not be output from the join
if (omitOpts == null) {
omitOpts = new HashSet<Integer>();
}
omitOpts.add(pos);
// generate a selection operator for group-by keys only
srcOp = insertSelectForSemijoin(fields, srcOp);
// generate a groupby operator (HASH mode) for a map-side partial
// aggregation for semijoin
srcOps[pos++] = genMapGroupByForSemijoin(qb, fields, srcOp,
GroupByDesc.Mode.HASH);
} else {
srcOps[pos++] = srcOp;
}
} else {
// A null base source marks the slot occupied by the left join subtree.
assert pos == 0;
srcOps[pos++] = joinSrcOp;
}
}
ExprNodeDesc[][] joinKeys = genJoinKeys(joinTree, srcOps);
for (int i = 0; i < srcOps.length; i++) {
// generate a ReduceSink operator for the join
String[] srcs = baseSrc[i] != null ? new String[] {baseSrc[i]} : joinTree.getLeftAliases();
srcOps[i] = genNotNullFilterForJoinSourcePlan(qb, srcOps[i], joinTree, joinKeys[i]);
srcOps[i] = genJoinReduceSinkChild(qb, joinKeys[i], srcOps[i], srcs, joinTree.getNextTag());
}
JoinOperator joinOp = (JoinOperator) genJoinOperatorChildren(joinTree,
joinSrcOp, srcOps, omitOpts);
joinContext.put(joinOp, joinTree);
// Non-pushable predicates are evaluated after the join.
Operator op = joinOp;
for(ASTNode condn : joinTree.getPostJoinFilters() ) {
op = genFilterPlan(qb, condn, op);
}
return op;
}
/**
 * Construct a selection operator for semijoin that filters out all fields
 * other than the group by keys.
 *
 * @param fields
 *          list of fields need to be output
 * @param input
 *          input operator
 * @return the selection operator.
 * @throws SemanticException
 */
private Operator insertSelectForSemijoin(ArrayList<ASTNode> fields,
    Operator input) throws SemanticException {
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
  ArrayList<String> columnNames = new ArrayList<String>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // Project exactly the requested (group-by key) columns, in order.
  for (ASTNode field : fields) {
    ExprNodeColumnDesc colDesc =
        (ExprNodeColumnDesc) genExprNodeDesc(field, inputRR);
    String colName = colDesc.getColumn();
    colList.add(colDesc);
    columnNames.add(colName);
    colExprMap.put(colName, colDesc);
  }
  // Wrap the projection in a Select operator over the input's schema.
  SelectDesc selDesc = new SelectDesc(colList, columnNames, false);
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
      selDesc, new RowSchema(inputRR.getColumnInfos()), input), inputRR);
  output.setColumnExprMap(colExprMap);
  return output;
}
/**
 * Generates a map-side (HASH mode) group-by on the semijoin key columns, used to
 * deduplicate the right-hand side of a LEFT SEMI JOIN before the shuffle.
 *
 * @param qb query block being compiled
 * @param fields ASTNodes of the join key columns ("tab.col") to group on
 * @param inputOperatorInfo operator producing the semijoin RHS rows
 * @param mode group-by mode (HASH for the map-side partial aggregation)
 * @return the GroupByOperator registered in the operator-parse-context map
 * @throws SemanticException if a key expression cannot be compiled
 */
private Operator genMapGroupByForSemijoin(QB qb, ArrayList<ASTNode> fields,
    Operator inputOperatorInfo, GroupByDesc.Mode mode)
    throws SemanticException {
  RowResolver groupByInputRowResolver = opParseCtx.get(inputOperatorInfo)
      .getRowResolver();
  RowResolver groupByOutputRowResolver = new RowResolver();
  ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // (Removed a dead "qb.getParseInfo();" call whose result was discarded.)
  groupByOutputRowResolver.setIsExprResolver(true); // join keys should only
  // be columns but not be
  // expressions
  for (int i = 0; i < fields.size(); ++i) {
    // get the group by keys to ColumnInfo
    ASTNode colName = fields.get(i);
    ExprNodeDesc grpByExprNode = genExprNodeDesc(colName,
        groupByInputRowResolver);
    groupByKeys.add(grpByExprNode);
    // generate output column names
    String field = getColumnInternalName(i);
    outputColumnNames.add(field);
    ColumnInfo colInfo2 = new ColumnInfo(field, grpByExprNode.getTypeInfo(),
        "", false);
    groupByOutputRowResolver.putExpression(colName, colInfo2);
    // establish mapping from the output column to the input column
    colExprMap.put(field, grpByExprNode);
  }
  // Generate group-by operator; no aggregations, keys only.
  float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
  float memoryThreshold = HiveConf
      .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
  Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
          false, groupByMemoryUsage, memoryThreshold, null, false, 0, false),
      new RowSchema(groupByOutputRowResolver.getColumnInfos()),
      inputOperatorInfo), groupByOutputRowResolver);
  op.setColumnExprMap(colExprMap);
  return op;
}
/**
 * Type-checks the join keys of all inputs and inserts implicit conversion casts so
 * that the k-th key of every input shares a common comparison type.
 *
 * @param keys keys[i] is the key-expression array of the i-th join input; all
 *        arrays are expected to have the same length
 * @return the same arrays, with casts inserted where conversion was required
 * @throws SemanticException if two corresponding keys have no common comparison type
 */
private ExprNodeDesc[][] genJoinOperatorTypeCheck(ExprNodeDesc[][] keys)
    throws SemanticException {
  // All inputs must supply the same number of keys; take the first as reference.
  int keyLength = keys.length > 0 ? keys[0].length : 0;
  for (int i = 1; i < keys.length; i++) {
    assert keys[i].length == keyLength;
  }
  // implicit type conversion hierarchy
  for (int k = 0; k < keyLength; k++) {
    // Fold the per-input types into one common comparison type.
    TypeInfo commonType = keys[0][k].getTypeInfo();
    for (int i = 1; i < keys.length; i++) {
      TypeInfo a = commonType;
      TypeInfo b = keys[i][k].getTypeInfo();
      commonType = FunctionRegistry.getCommonClassForComparison(a, b);
      if (commonType == null) {
        throw new SemanticException(
            "Cannot do equality join on different types: " + a.getTypeName()
                + " and " + b.getTypeName());
      }
    }
    // Cast every key whose type differs from the common comparison type.
    for (int i = 0; i < keys.length; i++) {
      if (TypeInfoUtils.isConversionRequiredForComparison(
          keys[i][k].getTypeInfo(), commonType)) {
        keys[i][k] = ParseUtils.createConversionCast(
            keys[i][k], (PrimitiveTypeInfo) commonType);
      }
    }
  }
  return keys;
}
/**
 * Generates the complete join operator tree for the query block.
 *
 * @param qb query block whose join tree is compiled
 * @param map alias -&gt; source operator for all base sources
 * @return the topmost operator of the generated join plan
 * @throws SemanticException on any plan-generation failure
 */
private Operator genJoinPlan(QB qb, Map<String, Operator> map)
    throws SemanticException {
  return genJoinOperator(qb, qb.getQbJoinTree(), map, null);
}
/**
 * Extract the filters from the join condition and push them on top of the
 * source operators, recursing into the whole join tree.
 *
 * @param qb query block being compiled
 * @param joinTree join tree whose pushable filters are applied
 * @param map alias -&gt; source operator; updated in place with filter operators
 * @throws SemanticException if a filter expression cannot be compiled
 */
private void pushJoinFilters(QB qb, QBJoinTree joinTree,
    Map<String, Operator> map) throws SemanticException {
  // Delegate to the recursive variant with recursion enabled.
  pushJoinFilters(qb, joinTree, map, true);
}
/**
 * Extract the filters from the join condition and push them on top of the
 * source operators, optionally recursing into the left join subtree first.
 *
 * @param qb query block being compiled
 * @param joinTree join tree whose pushable filters are applied
 * @param map alias -&gt; source operator; updated in place with filter operators
 * @param recursively whether to process nested join subtrees as well
 * @throws SemanticException if a filter expression cannot be compiled
 */
private void pushJoinFilters(QB qb, QBJoinTree joinTree,
    Map<String, Operator> map,
    boolean recursively) throws SemanticException {
  if (recursively && joinTree.getJoinSrc() != null) {
    pushJoinFilters(qb, joinTree.getJoinSrc(), map);
  }
  // filters.get(pos) holds the pushable predicates of the pos-th base source.
  ArrayList<ArrayList<ASTNode>> filters = joinTree.getFiltersForPushing();
  int pos = 0;
  for (String src : joinTree.getBaseSrc()) {
    if (src != null) {
      Operator target = map.get(src);
      for (ASTNode cond : filters.get(pos)) {
        target = genFilterPlan(qb, cond, target);
      }
      map.put(src, target);
    }
    pos++;
  }
}
/**
 * Collects the (lower-cased, de-duplicated) table names listed in MAPJOIN hints of
 * the query block. When the hint is disabled (hive.ignore.mapjoin.hint) or the
 * execution engine is Tez, no names are collected and the query properties are
 * marked as having had a mapjoin hint removed.
 *
 * @param qb query block whose hints are inspected (assumes hints are present —
 *           callers are expected to check getHints() != null first)
 * @return table names named in honored MAPJOIN hints, in first-seen order
 */
private List<String> getMapSideJoinTables(QB qb) {
  List<String> result = new ArrayList<String>();
  ASTNode hints = qb.getParseInfo().getHints();
  for (int i = 0; i < hints.getChildCount(); i++) {
    ASTNode hint = (ASTNode) hints.getChild(i);
    if (((ASTNode) hint.getChild(0)).getToken().getType() != HiveParser.TOK_MAPJOIN) {
      continue;
    }
    // the user has specified to ignore mapjoin hint
    boolean hintIgnored = conf.getBoolVar(HiveConf.ConfVars.HIVEIGNOREMAPJOINHINT)
        || conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez");
    if (hintIgnored) {
      queryProperties.setMapJoinRemoved(true);
      continue;
    }
    ASTNode hintTblNames = (ASTNode) hint.getChild(1);
    for (int t = 0; t < hintTblNames.getChildCount(); t++) {
      String tblName = ((ASTNode) hintTblNames.getChild(t)).getText().toLowerCase();
      if (!result.contains(tblName)) {
        result.add(tblName);
      }
    }
  }
  return result;
}
// The join alias is modified before being inserted for consumption by sort-merge
// join queries. If the join is part of a sub-query the alias is modified to include
// the sub-query alias.
private String getModifiedAlias(QB qb, String alias) {
  String qbId = qb.getId();
  return QB.getAppendedAliasFromId(qbId, alias);
}
/**
 * Builds the QBJoinTree for a UNIQUEJOIN parse tree: walks the children collecting
 * table references (aliases, preserved flags) and key-expression lists, validating
 * that every table supplies the same non-zero number of keys. Subqueries are not
 * supported inside UNIQUEJOIN.
 *
 * @param qb query block being compiled
 * @param joinParseTree the TOK_UNIQUEJOIN subtree
 * @param aliasToOpInfo alias -&gt; source operator, recorded on the join tree
 * @return the populated QBJoinTree
 * @throws SemanticException on mismatched key counts, subqueries, or unexpected nodes
 */
private QBJoinTree genUniqueJoinTree(QB qb, ASTNode joinParseTree,
Map<String, Operator> aliasToOpInfo)
throws SemanticException {
QBJoinTree joinTree = new QBJoinTree();
joinTree.setNoOuterJoin(false);
joinTree.setExpressions(new ArrayList<ArrayList<ASTNode>>());
joinTree.setFilters(new ArrayList<ArrayList<ASTNode>>());
joinTree.setFiltersForPushing(new ArrayList<ArrayList<ASTNode>>());
// Create joinTree structures to fill them up later
ArrayList<String> rightAliases = new ArrayList<String>();
ArrayList<String> leftAliases = new ArrayList<String>();
ArrayList<String> baseSrc = new ArrayList<String>();
ArrayList<Boolean> preserved = new ArrayList<Boolean>();
// Set when a KW_PRESERVE token precedes the next table reference.
boolean lastPreserved = false;
// Number of join keys; fixed by the first non-empty TOK_EXPLIST seen.
int cols = -1;
for (int i = 0; i < joinParseTree.getChildCount(); i++) {
ASTNode child = (ASTNode) joinParseTree.getChild(i);
switch (child.getToken().getType()) {
case HiveParser.TOK_TABREF:
// Handle a table - populate aliases appropriately:
// leftAliases should contain the first table, rightAliases should
// contain all other tables and baseSrc should contain all tables
String tableName = getUnescapedUnqualifiedTableName((ASTNode) child.getChild(0));
// The alias, if present, is the last child of the TOK_TABREF.
String alias = child.getChildCount() == 1 ? tableName
: unescapeIdentifier(child.getChild(child.getChildCount() - 1)
.getText().toLowerCase());
if (i == 0) {
leftAliases.add(alias);
joinTree.setLeftAlias(alias);
} else {
rightAliases.add(alias);
}
joinTree.getAliasToOpInfo().put(
getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
joinTree.setId(qb.getId());
baseSrc.add(alias);
preserved.add(lastPreserved);
lastPreserved = false;
break;
case HiveParser.TOK_EXPLIST:
if (cols == -1 && child.getChildCount() != 0) {
cols = child.getChildCount();
} else if (child.getChildCount() != cols) {
throw new SemanticException("Tables with different or invalid "
+ "number of keys in UNIQUEJOIN");
}
// UNIQUEJOIN has no ON-clause filters; register empty filter lists
// to keep positions aligned with the expression lists.
ArrayList<ASTNode> expressions = new ArrayList<ASTNode>();
ArrayList<ASTNode> filt = new ArrayList<ASTNode>();
ArrayList<ASTNode> filters = new ArrayList<ASTNode>();
for (Node exp : child.getChildren()) {
expressions.add((ASTNode) exp);
}
joinTree.getExpressions().add(expressions);
joinTree.getFilters().add(filt);
joinTree.getFiltersForPushing().add(filters);
break;
case HiveParser.KW_PRESERVE:
lastPreserved = true;
break;
case HiveParser.TOK_SUBQUERY:
throw new SemanticException(
"Subqueries are not supported in UNIQUEJOIN");
default:
throw new SemanticException("Unexpected UNIQUEJOIN structure");
}
}
joinTree.setBaseSrc(baseSrc.toArray(new String[0]));
joinTree.setLeftAliases(leftAliases.toArray(new String[0]));
joinTree.setRightAliases(rightAliases.toArray(new String[0]));
// One JoinCond per table, carrying its PRESERVE flag.
JoinCond[] condn = new JoinCond[preserved.size()];
for (int i = 0; i < condn.length; i++) {
condn[i] = new JoinCond(preserved.get(i));
}
joinTree.setJoinCond(condn);
if (qb.getParseInfo().getHints() != null) {
parseStreamTables(joinTree, qb);
}
return joinTree;
}
/*
* Setup a QBJoinTree between a SubQuery and its Parent Query. The Parent Query
* is the lhs of the Join.
*
* The Parent Query is represented by the last Operator needed to process its From Clause.
* In case of a single table Query this will be a TableScan, but it can be a Join Operator
* if the Parent Query contains Join clauses, or in case of a single source from clause,
* the source could be a SubQuery or a PTF invocation.
*
* We set up the QBJoinTree with the above constraints in place. So:
* - the lhs of the QBJoinTree can be another QBJoinTree if the Parent Query operator
* is a JoinOperator. In this case we get its QBJoinTree from the 'joinContext'
* - the rhs is always a reference to the SubQuery. Its alias is obtained from the
* QBSubQuery object.
*
* The QBSubQuery also provides the Joining Condition AST. The Joining Condition has been
* transformed in QBSubQuery setup, before this call. The Joining condition has any correlated
* predicates and a predicate for joining the Parent Query expression with the SubQuery.
*
* The QBSubQuery also specifies what kind of Join to construct.
*
* Given this information, once we initialize the QBJoinTree, we call the 'parseJoinCondition'
* method to validate and parse Join conditions.
*/
private QBJoinTree genSQJoinTree(QB qb, ISubQueryJoinInfo subQuery,
    Operator joiningOp,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBJoinTree joinTree = new QBJoinTree();
  // Exactly one join condition: parent query at position 0, subquery at position 1.
  JoinCond[] condn = new JoinCond[1];
  switch (subQuery.getJoinType()) {
  case LEFTOUTER:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTOUTER);
    break;
  case RIGHTOUTER:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.RIGHTOUTER);
    break;
  case FULLOUTER:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.FULLOUTER);
    break;
  case LEFTSEMI:
    joinTree.setNoSemiJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTSEMI);
    break;
  default:
    condn[0] = new JoinCond(0, 1, JoinType.INNER);
    joinTree.setNoOuterJoin(true);
    break;
  }
  joinTree.setJoinCond(condn);
  if ( joiningOp instanceof JoinOperator ) {
    // Parent query's FROM clause already ends in a join: chain its QBJoinTree
    // as the left source and extend the left-alias list with that tree's
    // right-most alias.
    QBJoinTree leftTree = joinContext.get(joiningOp);
    joinTree.setJoinSrc(leftTree);
    String[] leftChildAliases = leftTree.getLeftAliases();
    String leftAliases[] = new String[leftChildAliases.length + 1];
    for (int i = 0; i < leftChildAliases.length; i++) {
      leftAliases[i] = leftChildAliases[i];
    }
    leftAliases[leftChildAliases.length] = leftTree.getRightAliases()[0];
    joinTree.setLeftAliases(leftAliases);
  } else {
    // Single-source parent query (table scan, subquery or PTF): its alias is
    // the sole left alias and base source slot 0.
    String alias = unescapeIdentifier(
        SubQueryUtils.getAlias(joiningOp, aliasToOpInfo).toLowerCase());
    joinTree.setLeftAlias(alias);
    String[] leftAliases = new String[1];
    leftAliases[0] = alias;
    joinTree.setLeftAliases(leftAliases);
    String[] children = new String[2];
    children[0] = alias;
    joinTree.setBaseSrc(children);
    joinTree.setId(qb.getId());
    joinTree.getAliasToOpInfo().put(
        getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
  }
  // The rhs is always the SubQuery itself; record it as base source slot 1.
  String rightalias = unescapeIdentifier(subQuery.getAlias().toLowerCase());
  String[] rightAliases = new String[1];
  rightAliases[0] = rightalias;
  joinTree.setRightAliases(rightAliases);
  String[] children = joinTree.getBaseSrc();
  if (children == null) {
    children = new String[2];
  }
  children[1] = rightalias;
  joinTree.setBaseSrc(children);
  joinTree.setId(qb.getId());
  joinTree.getAliasToOpInfo().put(
      getModifiedAlias(qb, rightalias), aliasToOpInfo.get(rightalias));
  // remember rhs table for semijoin
  if (joinTree.getNoSemiJoin() == false) {
    joinTree.addRHSSemijoin(rightalias);
  }
  // Initialize per-side (lhs/rhs) containers for join keys, filters and
  // pushable filters before parsing the join condition fills them in.
  ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<ArrayList<ASTNode>>();
  expressions.add(new ArrayList<ASTNode>());
  expressions.add(new ArrayList<ASTNode>());
  joinTree.setExpressions(expressions);
  ArrayList<Boolean> nullsafes = new ArrayList<Boolean>();
  joinTree.setNullSafes(nullsafes);
  ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
  filters.add(new ArrayList<ASTNode>());
  filters.add(new ArrayList<ASTNode>());
  joinTree.setFilters(filters);
  joinTree.setFilterMap(new int[2][]);
  ArrayList<ArrayList<ASTNode>> filtersForPushing =
      new ArrayList<ArrayList<ASTNode>>();
  filtersForPushing.add(new ArrayList<ASTNode>());
  filtersForPushing.add(new ArrayList<ASTNode>());
  joinTree.setFiltersForPushing(filtersForPushing);
  // The joining condition AST was prepared during QBSubQuery setup; validate
  // and parse it now to populate the containers above.
  ASTNode joinCond = subQuery.getJoinConditionAST();
  ArrayList<String> leftSrc = new ArrayList<String>();
  parseJoinCondition(joinTree, joinCond, leftSrc, aliasToOpInfo);
  if (leftSrc.size() == 1) {
    joinTree.setLeftAlias(leftSrc.get(0));
  }
  return joinTree;
}
/**
 * Builds a QBJoinTree for a JOIN AST node. The left child may itself be a
 * join (handled recursively); the right child must be a table reference,
 * subquery or PTF invocation. Join conditions are parsed afterwards, and
 * any MAPJOIN / STREAMTABLE hints on the query block are applied.
 */
private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBJoinTree joinTree = new QBJoinTree();
  // Single condition between positions 0 (left) and 1 (right); type taken
  // from the JOIN token.
  JoinCond[] condn = new JoinCond[1];
  switch (joinParseTree.getToken().getType()) {
  case HiveParser.TOK_LEFTOUTERJOIN:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTOUTER);
    break;
  case HiveParser.TOK_RIGHTOUTERJOIN:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.RIGHTOUTER);
    break;
  case HiveParser.TOK_FULLOUTERJOIN:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.FULLOUTER);
    break;
  case HiveParser.TOK_LEFTSEMIJOIN:
    joinTree.setNoSemiJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTSEMI);
    break;
  default:
    condn[0] = new JoinCond(0, 1, JoinType.INNER);
    joinTree.setNoOuterJoin(true);
    break;
  }
  joinTree.setJoinCond(condn);
  ASTNode left = (ASTNode) joinParseTree.getChild(0);
  ASTNode right = (ASTNode) joinParseTree.getChild(1);
  if ((left.getToken().getType() == HiveParser.TOK_TABREF)
      || (left.getToken().getType() == HiveParser.TOK_SUBQUERY)
      || (left.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
    // Leaf source on the left: its alias becomes the sole left alias and
    // base source slot 0.
    String tableName = getUnescapedUnqualifiedTableName((ASTNode) left.getChild(0))
        .toLowerCase();
    String alias = extractJoinAlias(left, tableName);
    joinTree.setLeftAlias(alias);
    String[] leftAliases = new String[1];
    leftAliases[0] = alias;
    joinTree.setLeftAliases(leftAliases);
    String[] children = new String[2];
    children[0] = alias;
    joinTree.setBaseSrc(children);
    joinTree.setId(qb.getId());
    joinTree.getAliasToOpInfo().put(
        getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
  } else if (isJoinToken(left)) {
    // Nested join on the left: recurse, chain it as the join source, and
    // extend the left aliases with the nested tree's right-most alias.
    QBJoinTree leftTree = genJoinTree(qb, left, aliasToOpInfo);
    joinTree.setJoinSrc(leftTree);
    String[] leftChildAliases = leftTree.getLeftAliases();
    String leftAliases[] = new String[leftChildAliases.length + 1];
    for (int i = 0; i < leftChildAliases.length; i++) {
      leftAliases[i] = leftChildAliases[i];
    }
    leftAliases[leftChildAliases.length] = leftTree.getRightAliases()[0];
    joinTree.setLeftAliases(leftAliases);
  } else {
    assert (false);
  }
  if ((right.getToken().getType() == HiveParser.TOK_TABREF)
      || (right.getToken().getType() == HiveParser.TOK_SUBQUERY)
      || (right.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
    // The right side must always be a leaf source (parser guarantees the
    // join tree is left-deep); it fills base source slot 1.
    String tableName = getUnescapedUnqualifiedTableName((ASTNode) right.getChild(0))
        .toLowerCase();
    String alias = extractJoinAlias(right, tableName);
    String[] rightAliases = new String[1];
    rightAliases[0] = alias;
    joinTree.setRightAliases(rightAliases);
    String[] children = joinTree.getBaseSrc();
    if (children == null) {
      children = new String[2];
    }
    children[1] = alias;
    joinTree.setBaseSrc(children);
    joinTree.setId(qb.getId());
    joinTree.getAliasToOpInfo().put(
        getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
    // remember rhs table for semijoin
    if (joinTree.getNoSemiJoin() == false) {
      joinTree.addRHSSemijoin(alias);
    }
  } else {
    assert false;
  }
  // Initialize per-side (lhs/rhs) containers for join keys, filters and
  // pushable filters before parsing the ON condition fills them in.
  ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<ArrayList<ASTNode>>();
  expressions.add(new ArrayList<ASTNode>());
  expressions.add(new ArrayList<ASTNode>());
  joinTree.setExpressions(expressions);
  ArrayList<Boolean> nullsafes = new ArrayList<Boolean>();
  joinTree.setNullSafes(nullsafes);
  ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
  filters.add(new ArrayList<ASTNode>());
  filters.add(new ArrayList<ASTNode>());
  joinTree.setFilters(filters);
  joinTree.setFilterMap(new int[2][]);
  ArrayList<ArrayList<ASTNode>> filtersForPushing =
      new ArrayList<ArrayList<ASTNode>>();
  filtersForPushing.add(new ArrayList<ASTNode>());
  filtersForPushing.add(new ArrayList<ASTNode>());
  joinTree.setFiltersForPushing(filtersForPushing);
  // Child 2 of the JOIN node is the ON condition (may be null for implicit joins).
  ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);
  ArrayList<String> leftSrc = new ArrayList<String>();
  parseJoinCondition(joinTree, joinCond, leftSrc, aliasToOpInfo);
  if (leftSrc.size() == 1) {
    joinTree.setLeftAlias(leftSrc.get(0));
  }
  // check the hints to see if the user has specified a map-side join. This
  // will be removed later on, once the cost-based
  // infrastructure is in place
  if (qb.getParseInfo().getHints() != null) {
    List<String> mapSideTables = getMapSideJoinTables(qb);
    List<String> mapAliases = joinTree.getMapAliases();
    // An alias named in a MAPJOIN hint on either side of this join marks the
    // join as map-side.
    for (String mapTbl : mapSideTables) {
      boolean mapTable = false;
      for (String leftAlias : joinTree.getLeftAliases()) {
        if (mapTbl.equalsIgnoreCase(leftAlias)) {
          mapTable = true;
        }
      }
      for (String rightAlias : joinTree.getRightAliases()) {
        if (mapTbl.equalsIgnoreCase(rightAlias)) {
          mapTable = true;
        }
      }
      if (mapTable) {
        if (mapAliases == null) {
          mapAliases = new ArrayList<String>();
        }
        mapAliases.add(mapTbl);
        joinTree.setMapSideJoin(true);
      }
    }
    joinTree.setMapAliases(mapAliases);
    parseStreamTables(joinTree, qb);
  }
  return joinTree;
}
/**
 * Returns the alias to use for a join source node, falling back to the
 * table name when no explicit alias is present.
 */
private String extractJoinAlias(ASTNode node, String tableName) {
  // PTF invocations are guaranteed to carry an explicit alias as child 1
  // (checked earlier in processJoin). Node form:
  // ^(TOK_PTBLFUNCTION $name $alias? partitionTableFunctionSource partitioningSpec? expression*)
  if (node.getType() == HiveParser.TOK_PTBLFUNCTION) {
    return unescapeIdentifier(node.getChild(1).getText().toLowerCase());
  }
  // A lone child means no alias was given: the table name is the alias.
  if (node.getChildCount() == 1) {
    return tableName;
  }
  // Otherwise scan children from the end looking for an explicit Identifier.
  int idx = node.getChildCount() - 1;
  while (idx >= 1) {
    if (node.getChild(idx).getType() == HiveParser.Identifier) {
      return unescapeIdentifier(node.getChild(idx).getText().toLowerCase());
    }
    idx--;
  }
  return tableName;
}
/**
 * Records any STREAMTABLE hint aliases from the query block onto the join
 * tree's stream-alias list (created lazily on first hit).
 */
private void parseStreamTables(QBJoinTree joinTree, QB qb) {
  List<String> streamAliases = joinTree.getStreamAliases();
  for (Node hintNode : qb.getParseInfo().getHints().getChildren()) {
    ASTNode hint = (ASTNode) hintNode;
    if (hint.getChild(0).getType() != HiveParser.TOK_STREAMTABLE) {
      continue; // only STREAMTABLE hints are relevant here
    }
    // Child 1 of the hint holds the list of streamed table aliases.
    int tableCount = hint.getChild(1).getChildCount();
    for (int i = 0; i < tableCount; i++) {
      if (streamAliases == null) {
        streamAliases = new ArrayList<String>();
      }
      streamAliases.add(hint.getChild(1).getChild(i).getText());
    }
  }
  joinTree.setStreamAliases(streamAliases);
}
/**
 * Merges join tree {@code node} into join tree {@code target} so both joins can
 * be evaluated by a single join operator: node's right aliases, base sources,
 * join-key expressions, null-safety flags, filters, filter maps and join
 * conditions are appended to target, re-indexed as needed.
 *
 * @param qb               query block being compiled
 * @param node             join tree to fold into {@code target}
 * @param target           join tree that absorbs {@code node}
 * @param pos              alias position in {@code target} that node's left side
 *                         corresponds to (0 = left alias, i+1 = i-th right alias)
 * @param tgtToNodeExprMap for each of target's join-key expressions, the index of
 *                         the equal expression in node's key list (computed by
 *                         {@code findMergePos}); used to reorder node's keys so
 *                         both sides of the merged join spray in the same order
 */
private void mergeJoins(QB qb, QBJoinTree node, QBJoinTree target, int pos, int[] tgtToNodeExprMap) {
  // Append node's right aliases after target's.
  String[] nodeRightAliases = node.getRightAliases();
  String[] trgtRightAliases = target.getRightAliases();
  String[] rightAliases = new String[nodeRightAliases.length
      + trgtRightAliases.length];
  for (int i = 0; i < trgtRightAliases.length; i++) {
    rightAliases[i] = trgtRightAliases[i];
  }
  for (int i = 0; i < nodeRightAliases.length; i++) {
    rightAliases[i + trgtRightAliases.length] = nodeRightAliases[i];
  }
  target.setRightAliases(rightAliases);
  target.getAliasToOpInfo().putAll(node.getAliasToOpInfo());
  // Append node's base sources, skipping its slot 0 (the shared left side
  // already present in target).
  String[] nodeBaseSrc = node.getBaseSrc();
  String[] trgtBaseSrc = target.getBaseSrc();
  String[] baseSrc = new String[nodeBaseSrc.length + trgtBaseSrc.length - 1];
  for (int i = 0; i < trgtBaseSrc.length; i++) {
    baseSrc[i] = trgtBaseSrc[i];
  }
  for (int i = 1; i < nodeBaseSrc.length; i++) {
    baseSrc[i + trgtBaseSrc.length - 1] = nodeBaseSrc[i];
  }
  target.setBaseSrc(baseSrc);
  ArrayList<ArrayList<ASTNode>> expr = target.getExpressions();
  for (int i = 0; i < nodeRightAliases.length; i++) {
    List<ASTNode> nodeConds = node.getExpressions().get(i + 1);
    ArrayList<ASTNode> reordereNodeConds = new ArrayList<ASTNode>();
    for(int k=0; k < tgtToNodeExprMap.length; k++) {
      // FIX: use tgtToNodeExprMap to reorder node's join keys into target's
      // key order. The previous code copied nodeConds.get(k) unreordered,
      // defeating the mapping findMergePos computed and producing mismatched
      // spray keys whenever equal expressions appear in different orders.
      reordereNodeConds.add(nodeConds.get(tgtToNodeExprMap[k]));
    }
    expr.add(reordereNodeConds);
  }
  ArrayList<Boolean> nns = node.getNullSafes();
  ArrayList<Boolean> tns = target.getNullSafes();
  for (int i = 0; i < tns.size(); i++) {
    tns.set(i, tns.get(i) & nns.get(i)); // any of condition contains non-NS, non-NS
  }
  // Append node's non-pushable filters; filters tied to node's left side (slot
  // 0) are attached at the merge position in target.
  ArrayList<ArrayList<ASTNode>> filters = target.getFilters();
  for (int i = 0; i < nodeRightAliases.length; i++) {
    filters.add(node.getFilters().get(i + 1));
  }
  if (node.getFilters().get(0).size() != 0) {
    ArrayList<ASTNode> filterPos = filters.get(pos);
    filterPos.addAll(node.getFilters().get(0));
  }
  // Rebuild the filter map: node's alias indices shift by the number of
  // target's right aliases; node's slot-0 mappings are merged into target's
  // entry at 'pos'.
  int[][] nmap = node.getFilterMap();
  int[][] tmap = target.getFilterMap();
  int[][] newmap = new int[tmap.length + nmap.length - 1][];
  for (int[] mapping : nmap) {
    if (mapping != null) {
      for (int i = 0; i < mapping.length; i += 2) {
        if (pos > 0 || mapping[i] > 0) {
          mapping[i] += trgtRightAliases.length;
        }
      }
    }
  }
  if (nmap[0] != null) {
    if (tmap[pos] == null) {
      tmap[pos] = nmap[0];
    } else {
      int[] appended = new int[tmap[pos].length + nmap[0].length];
      System.arraycopy(tmap[pos], 0, appended, 0, tmap[pos].length);
      System.arraycopy(nmap[0], 0, appended, tmap[pos].length, nmap[0].length);
      tmap[pos] = appended;
    }
  }
  System.arraycopy(tmap, 0, newmap, 0, tmap.length);
  System.arraycopy(nmap, 1, newmap, tmap.length, nmap.length - 1);
  target.setFilterMap(newmap);
  ArrayList<ArrayList<ASTNode>> filter = target.getFiltersForPushing();
  for (int i = 0; i < nodeRightAliases.length; i++) {
    filter.add(node.getFiltersForPushing().get(i + 1));
  }
  if (node.getFiltersForPushing().get(0).size() != 0) {
    /*
     * for each predicate:
     * - does it refer to one or many aliases
     * - if one: add it to the filterForPushing list of that alias
     * - if many: add as a filter from merging trees.
     */
    for(ASTNode nodeFilter : node.getFiltersForPushing().get(0) ) {
      int fPos = ParseUtils.checkJoinFilterRefersOneAlias(target.getBaseSrc(), nodeFilter);
      if ( fPos != - 1 ) {
        filter.get(fPos).add(nodeFilter);
      } else {
        target.addPostJoinFilter(nodeFilter);
      }
    }
  }
  // The merged tree is outer/semi if either input was.
  if (node.getNoOuterJoin() && target.getNoOuterJoin()) {
    target.setNoOuterJoin(true);
  } else {
    target.setNoOuterJoin(false);
  }
  if (node.getNoSemiJoin() && target.getNoSemiJoin()) {
    target.setNoSemiJoin(true);
  } else {
    target.setNoSemiJoin(false);
  }
  target.mergeRHSSemijoin(node);
  // Append node's join conditions, re-basing their left/right positions: a
  // left of 0 maps to 'pos', everything else shifts past target's conditions.
  JoinCond[] nodeCondns = node.getJoinCond();
  int nodeCondnsSize = nodeCondns.length;
  JoinCond[] targetCondns = target.getJoinCond();
  int targetCondnsSize = targetCondns.length;
  JoinCond[] newCondns = new JoinCond[nodeCondnsSize + targetCondnsSize];
  for (int i = 0; i < targetCondnsSize; i++) {
    newCondns[i] = targetCondns[i];
  }
  for (int i = 0; i < nodeCondnsSize; i++) {
    JoinCond nodeCondn = nodeCondns[i];
    if (nodeCondn.getLeft() == 0) {
      nodeCondn.setLeft(pos);
    } else {
      nodeCondn.setLeft(nodeCondn.getLeft() + targetCondnsSize);
    }
    nodeCondn.setRight(nodeCondn.getRight() + targetCondnsSize);
    newCondns[targetCondnsSize + i] = nodeCondn;
  }
  target.setJoinCond(newCondns);
  // Union the map-join alias lists when both trees are map-side joins.
  if (target.isMapSideJoin()) {
    assert node.isMapSideJoin();
    List<String> mapAliases = target.getMapAliases();
    for (String mapTbl : node.getMapAliases()) {
      if (!mapAliases.contains(mapTbl)) {
        mapAliases.add(mapTbl);
      }
    }
    target.setMapAliases(mapAliases);
  }
}
/**
 * Determines whether {@code node} can be merged into {@code target}.
 * A merge is possible when node's left alias appears in target and the two
 * trees join on the same set of key expressions (order-insensitive).
 *
 * @return a pair of (merge position in target, mapping from target key index
 *         to the equal node key index), or (-1, null) when no merge is possible
 */
private ObjectPair<Integer, int[]> findMergePos(QBJoinTree node, QBJoinTree target) {
  int res = -1;
  String leftAlias = node.getLeftAlias();
  if (leftAlias == null) {
    // FIX(idiom): parameterize the ObjectPair constructions; the raw-type
    // `new ObjectPair(-1, null)` generated unchecked-conversion warnings.
    return new ObjectPair<Integer, int[]>(-1, null);
  }
  ArrayList<ASTNode> nodeCondn = node.getExpressions().get(0);
  ArrayList<ASTNode> targetCondn = null;
  // Locate node's left alias within target: position 0 is target's left
  // alias, positions i+1 are target's right aliases.
  if (leftAlias.equals(target.getLeftAlias())) {
    targetCondn = target.getExpressions().get(0);
    res = 0;
  } else {
    for (int i = 0; i < target.getRightAliases().length; i++) {
      if (leftAlias.equals(target.getRightAliases()[i])) {
        targetCondn = target.getExpressions().get(i + 1);
        res = i + 1;
        break;
      }
    }
  }
  if ( targetCondn == null ) {
    return new ObjectPair<Integer, int[]>(-1, null);
  }
  /*
   * The order of the join condition expressions don't matter.
   * A merge can happen:
   * - if every target condition is present in some position of the node condition list.
   * - there is no node condition, which is not equal to any target condition.
   */
  int[] tgtToNodeExprMap = new int[targetCondn.size()];
  boolean[] nodeFiltersMapped = new boolean[nodeCondn.size()];
  int i, j;
  for(i=0; i<targetCondn.size(); i++) {
    // Compare expressions structurally via their string trees.
    String tgtExprTree = targetCondn.get(i).toStringTree();
    tgtToNodeExprMap[i] = -1;
    for(j=0; j < nodeCondn.size(); j++) {
      if ( nodeCondn.get(j).toStringTree().equals(tgtExprTree)) {
        tgtToNodeExprMap[i] = j;
        nodeFiltersMapped[j] = true;
      }
    }
    if ( tgtToNodeExprMap[i] == -1) {
      return new ObjectPair<Integer, int[]>(-1, null);
    }
  }
  // Every node condition must have matched some target condition as well.
  for(j=0; j < nodeCondn.size(); j++) {
    if ( !nodeFiltersMapped[j]) {
      return new ObjectPair<Integer, int[]>(-1, null);
    }
  }
  return new ObjectPair<Integer, int[]>(res, tgtToNodeExprMap);
}
// try merge join tree from inner most source
// (it was merged from outer most to inner, which could be invalid)
//
// in a join tree ((A-B)-C)-D where C is not mergeable with A-B,
// D can be merged with A-B into single join If and only if C and D has same join type
// In this case, A-B-D join will be executed first and ABD-C join will be executed in next
private void mergeJoinTree(QB qb) {
  QBJoinTree tree = qb.getQbJoinTree();
  if (tree.getJoinSrc() == null) {
    return; // single join, nothing to merge
  }
  // make array with QBJoinTree : outer most(0) --> inner most(n)
  List<QBJoinTree> trees = new ArrayList<QBJoinTree>();
  for (;tree != null; tree = tree.getJoinSrc()) {
    trees.add(tree);
  }
  // merging from 'target'(inner) to 'node'(outer)
  for (int i = trees.size() - 1; i >= 0; i--) {
    QBJoinTree target = trees.get(i);
    if (target == null) {
      continue; // already merged away
    }
    JoinType prevType = null; // save join type
    for (int j = i - 1; j >= 0; j--) {
      QBJoinTree node = trees.get(j);
      if (node == null) {
        continue; // already merged away
      }
      JoinType currType = getType(node.getJoinCond());
      // Once a non-mergeable join of a different type intervenes, stop:
      // merging across it would reorder joins of differing semantics.
      if (prevType != null && prevType != currType) {
        break;
      }
      ObjectPair<Integer, int[]> mergeDetails = findMergePos(node, target);
      int pos = mergeDetails.getFirst();
      if (pos >= 0) {
        // for outer joins, it should not exceed 16 aliases (short type)
        if (!node.getNoOuterJoin() || !target.getNoOuterJoin()) {
          if (node.getRightAliases().length + target.getRightAliases().length + 1 > 16) {
            LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16);
            continue;
          }
        }
        mergeJoins(qb, node, target, pos, mergeDetails.getSecond());
        trees.set(j, null); // mark node as consumed
        continue; // continue merging with next alias
      }
      // Remember the first non-mergeable join type seen, to enforce the
      // same-type constraint above.
      if (prevType == null) {
        prevType = currType;
      }
    }
  }
  // reconstruct join tree
  QBJoinTree current = null;
  for (int i = 0; i < trees.size(); i++) {
    QBJoinTree target = trees.get(i);
    if (target == null) {
      continue;
    }
    if (current == null) {
      qb.setQbJoinTree(current = target);
    } else {
      current.setJoinSrc(target);
      current = target;
    }
  }
}
// Join types should be all the same for merging (or returns null)
private JoinType getType(JoinCond[] conds) {
  JoinType first = conds[0].getJoinType();
  for (int idx = 1; idx < conds.length; idx++) {
    if (conds[idx].getJoinType() != first) {
      return null; // mixed join types: not a single mergeable type
    }
  }
  return first;
}
/**
 * Inserts a pass-through SELECT * operator above {@code input}. The extra
 * select gives the ColumnPruner a place to cut unneeded columns before the
 * group-by shuffle.
 *
 * @param input operator to wrap
 * @return the new select operator, registered in the op parse context with
 *         the same row resolver as {@code input}
 */
private Operator insertSelectAllPlanForGroupBy(Operator input)
    throws SemanticException {
  OpParseContext inputCtx = opParseCtx.get(input);
  RowResolver inputRR = inputCtx.getRowResolver();
  ArrayList<ColumnInfo> columns = inputRR.getColumnInfos();
  ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
  ArrayList<String> columnNames = new ArrayList<String>();
  Map<String, ExprNodeDesc> columnExprMap =
      new HashMap<String, ExprNodeDesc>();
  for (int i = 0; i < columns.size(); i++) {
    ColumnInfo col = columns.get(i);
    // Build the column expression once and reuse it for both the select list
    // and the column-expression map (previously constructed twice identically).
    ExprNodeColumnDesc colDesc = new ExprNodeColumnDesc(col.getType(),
        col.getInternalName(), col.getTabAlias(), col.getIsVirtualCol());
    colList.add(colDesc);
    columnNames.add(col.getInternalName());
    columnExprMap.put(col.getInternalName(), colDesc);
  }
  // selStarNoCompute = true: this select passes rows through unchanged.
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new SelectDesc(colList, columnNames, true), new RowSchema(inputRR
          .getColumnInfos()), input), inputRR);
  output.setColumnExprMap(columnExprMap);
  return output;
}
// Return the common distinct expression
// There should be more than 1 destination, with group bys in all of them.
private List<ASTNode> getCommonDistinctExprs(QB qb, Operator input) {
  QBParseInfo qbp = qb.getParseInfo();
  // If a grouping set aggregation is present, common processing is not possible
  if (!qbp.getDestCubes().isEmpty() || !qbp.getDestRollups().isEmpty()
      || !qbp.getDestToLateralView().isEmpty()) {
    return null;
  }
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  TreeSet<String> ks = new TreeSet<String>();
  ks.addAll(qbp.getClauseNames());
  // Go over all the destination tables
  if (ks.size() <= 1) {
    return null; // optimization only pays off with multiple inserts
  }
  // oldList/oldASTList hold the first destination's distinct expressions;
  // every later destination must match oldList exactly or we bail out.
  List<ExprNodeDesc> oldList = null;
  List<ASTNode> oldASTList = null;
  for (String dest : ks) {
    // If a filter is present, common processing is not possible
    if (qbp.getWhrForClause(dest) != null) {
      return null;
    }
    // Every destination must aggregate or group; otherwise bail out.
    if (qbp.getAggregationExprsForClause(dest).size() == 0
        && getGroupByForClause(qbp, dest).size() == 0) {
      return null;
    }
    // All distinct expressions must be the same
    List<ASTNode> list = qbp.getDistinctFuncExprsForClause(dest);
    if (list.isEmpty()) {
      return null;
    }
    List<ExprNodeDesc> currDestList;
    try {
      currDestList = getDistinctExprs(qbp, dest, inputRR);
    } catch (SemanticException e) {
      return null; // unparseable distinct expr: fall back to normal planning
    }
    List<ASTNode> currASTList = new ArrayList<ASTNode>();
    for (ASTNode value : list) {
      // 0 is function name
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode parameter = (ASTNode) value.getChild(i);
        currASTList.add(parameter);
      }
      // Compare against the first destination's distinct exprs (the AST list
      // returned is always the first destination's).
      if (oldList == null) {
        oldList = currDestList;
        oldASTList = currASTList;
      } else {
        if (!matchExprLists(oldList, currDestList)) {
          return null;
        }
      }
    }
  }
  return oldASTList;
}
/**
 * Creates a single reduce sink shared by all insert destinations when they
 * agree on a common distinct expression: the distinct keys become reduce
 * keys, and all group-by keys and aggregation parameters of every
 * destination are carried as reduce values.
 */
private Operator createCommonReduceSink(QB qb, Operator input)
    throws SemanticException {
  // Go over all the tables and extract the common distinct key
  List<ASTNode> distExprs = getCommonDistinctExprs(qb, input);
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> ks = new TreeSet<String>();
  ks.addAll(qbp.getClauseNames());
  // Pass the entire row
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  RowResolver reduceSinkOutputRowResolver = new RowResolver();
  reduceSinkOutputRowResolver.setIsExprResolver(true);
  ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // Pre-compute distinct group-by keys and store in reduceKeys
  List<String> outputColumnNames = new ArrayList<String>();
  for (ASTNode distn : distExprs) {
    ExprNodeDesc distExpr = genExprNodeDesc(distn, inputRR);
    // Deduplicate: each distinct expression becomes one KEY column.
    if (reduceSinkOutputRowResolver.getExpression(distn) == null) {
      reduceKeys.add(distExpr);
      outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
      String field = Utilities.ReduceField.KEY.toString() + "."
          + getColumnInternalName(reduceKeys.size() - 1);
      ColumnInfo colInfo = new ColumnInfo(field, reduceKeys.get(
          reduceKeys.size() - 1).getTypeInfo(), "", false);
      reduceSinkOutputRowResolver.putExpression(distn, colInfo);
      colExprMap.put(colInfo.getInternalName(), distExpr);
    }
  }
  // Go over all the grouping keys and aggregations
  for (String dest : ks) {
    // Group-by keys of every destination ride along as VALUE columns.
    List<ASTNode> grpByExprs = getGroupByForClause(qbp, dest);
    for (int i = 0; i < grpByExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByExprs.get(i);
      if (reduceSinkOutputRowResolver.getExpression(grpbyExpr) == null) {
        ExprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr, inputRR);
        reduceValues.add(grpByExprNode);
        String field = Utilities.ReduceField.VALUE.toString() + "."
            + getColumnInternalName(reduceValues.size() - 1);
        ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(
            reduceValues.size() - 1).getTypeInfo(), "", false);
        reduceSinkOutputRowResolver.putExpression(grpbyExpr, colInfo);
        outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
        colExprMap.put(field, grpByExprNode);
      }
    }
    // For each aggregation
    HashMap<String, ASTNode> aggregationTrees = qbp
        .getAggregationExprsForClause(dest);
    assert (aggregationTrees != null);
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      ASTNode value = entry.getValue();
      // 0 is the function name
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode paraExpr = (ASTNode) value.getChild(i);
        // Aggregation parameters also ride along as VALUE columns.
        if (reduceSinkOutputRowResolver.getExpression(paraExpr) == null) {
          ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRR);
          reduceValues.add(paraExprNode);
          String field = Utilities.ReduceField.VALUE.toString() + "."
              + getColumnInternalName(reduceValues.size() - 1);
          ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(
              reduceValues.size() - 1).getTypeInfo(), "", false);
          reduceSinkOutputRowResolver.putExpression(paraExpr, colInfo);
          outputColumnNames
              .add(getColumnInternalName(reduceValues.size() - 1));
          colExprMap.put(field, paraExprNode);
        }
      }
    }
  }
  // numPartitionFields = reduceKeys.size(): partition on all distinct keys;
  // -1 reducers lets the optimizer/config choose.
  ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
      OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys,
          reduceValues, outputColumnNames, true, -1, reduceKeys.size(), -1),
          new RowSchema(reduceSinkOutputRowResolver.getColumnInfos()), input),
      reduceSinkOutputRowResolver);
  rsOp.setColumnExprMap(colExprMap);
  return rsOp;
}
// Groups the clause names into lists so that any two clauses in the same list has the same
// group by and distinct keys and no clause appears in more than one list. Returns a list of the
// lists of clauses.
private List<List<String>> getCommonGroupByDestGroups(QB qb,
    Map<String, Operator<? extends OperatorDesc>> inputs) throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> ks = new TreeSet<String>();
  ks.addAll(qbp.getClauseNames());
  List<List<String>> commonGroupByDestGroups = new ArrayList<List<String>>();
  // If this is a trivial query block return
  if (ks.size() <= 1) {
    List<String> oneList = new ArrayList<String>(1);
    if (ks.size() == 1) {
      oneList.add(ks.first());
    }
    commonGroupByDestGroups.add(oneList);
    return commonGroupByDestGroups;
  }
  // Parallel lists, one entry per discovered group: the input operator, the
  // group's spray (group-by) keys, and the group's distinct keys.
  List<Operator<? extends OperatorDesc>> inputOperators =
      new ArrayList<Operator<? extends OperatorDesc>>(ks.size());
  List<List<ExprNodeDesc>> sprayKeyLists = new ArrayList<List<ExprNodeDesc>>(ks.size());
  List<List<ExprNodeDesc>> distinctKeyLists = new ArrayList<List<ExprNodeDesc>>(ks.size());
  // Iterate over each clause
  for (String dest : ks) {
    Operator input = inputs.get(dest);
    RowResolver inputRR = opParseCtx.get(input).getRowResolver();
    List<ExprNodeDesc> distinctKeys = getDistinctExprs(qbp, dest, inputRR);
    List<ExprNodeDesc> sprayKeys = new ArrayList<ExprNodeDesc>();
    // Add the group by expressions
    List<ASTNode> grpByExprs = getGroupByForClause(qbp, dest);
    for (ASTNode grpByExpr : grpByExprs) {
      ExprNodeDesc exprDesc = genExprNodeDesc(grpByExpr, inputRR);
      if (ExprNodeDescUtils.indexOf(exprDesc, sprayKeys) < 0) {
        sprayKeys.add(exprDesc);
      }
    }
    // Loop through each of the lists of exprs, looking for a match
    boolean found = false;
    for (int i = 0; i < sprayKeyLists.size(); i++) {
      // Clauses can only share a group when they read from the same input.
      if (!input.equals(inputOperators.get(i))) {
        continue;
      }
      if (distinctKeys.isEmpty()) {
        // current dest has no distinct keys.
        List<ExprNodeDesc> combinedList = new ArrayList<ExprNodeDesc>();
        combineExprNodeLists(sprayKeyLists.get(i), distinctKeyLists.get(i), combinedList);
        if (!matchExprLists(combinedList, sprayKeys)) {
          continue;
        } // else do the common code at the end.
      } else {
        if (distinctKeyLists.get(i).isEmpty()) {
          List<ExprNodeDesc> combinedList = new ArrayList<ExprNodeDesc>();
          combineExprNodeLists(sprayKeys, distinctKeys, combinedList);
          if (!matchExprLists(combinedList, sprayKeyLists.get(i))) {
            continue;
          } else {
            // we have found a match. insert this distinct clause to head.
            // The group's representative keys become this clause's, since it
            // is the one that actually has distinct keys.
            distinctKeyLists.remove(i);
            sprayKeyLists.remove(i);
            distinctKeyLists.add(i, distinctKeys);
            sprayKeyLists.add(i, sprayKeys);
            commonGroupByDestGroups.get(i).add(0, dest);
            found = true;
            break;
          }
        } else {
          // Both have distinct keys: both distinct and spray keys must match.
          if (!matchExprLists(distinctKeyLists.get(i), distinctKeys)) {
            continue;
          }
          if (!matchExprLists(sprayKeyLists.get(i), sprayKeys)) {
            continue;
          }
          // else do common code
        }
      }
      // common code
      // A match was found, so add the clause to the corresponding list
      commonGroupByDestGroups.get(i).add(dest);
      found = true;
      break;
    }
    // No match was found, so create new entries
    if (!found) {
      inputOperators.add(input);
      sprayKeyLists.add(sprayKeys);
      distinctKeyLists.add(distinctKeys);
      List<String> destGroup = new ArrayList<String>();
      destGroup.add(dest);
      commonGroupByDestGroups.add(destGroup);
    }
  }
  return commonGroupByDestGroups;
}
/**
 * Appends all of {@code list}, then every element of {@code list2} not
 * already present, into {@code combinedList} (a deduplicating union that
 * preserves order).
 */
private void combineExprNodeLists(List<ExprNodeDesc> list, List<ExprNodeDesc> list2,
    List<ExprNodeDesc> combinedList) {
  combinedList.addAll(list);
  for (int i = 0; i < list2.size(); i++) {
    ExprNodeDesc candidate = list2.get(i);
    if (!combinedList.contains(candidate)) {
      combinedList.add(candidate);
    }
  }
}
// Returns whether or not two lists contain the same elements independent of order
private boolean matchExprLists(List<ExprNodeDesc> list1, List<ExprNodeDesc> list2) {
  if (list1.size() != list2.size()) {
    return false;
  }
  // Equal sizes + every element of list1 found in list2 => same multiset
  // (up to the equality ExprNodeDescUtils.indexOf uses).
  for (int i = 0; i < list1.size(); i++) {
    if (ExprNodeDescUtils.indexOf(list1.get(i), list2) < 0) {
      return false;
    }
  }
  return true;
}
// Returns a list of the distinct exprs without duplicates for a given clause name
private List<ExprNodeDesc> getDistinctExprs(QBParseInfo qbp, String dest, RowResolver inputRR)
    throws SemanticException {
  List<ExprNodeDesc> distinctExprs = new ArrayList<ExprNodeDesc>();
  for (ASTNode distinctAggExpr : qbp.getDistinctFuncExprsForClause(dest)) {
    // child 0 is the aggregation function name; parameters start at child 1
    for (int c = 1; c < distinctAggExpr.getChildCount(); c++) {
      ExprNodeDesc expr =
          genExprNodeDesc((ASTNode) distinctAggExpr.getChild(c), inputRR);
      if (ExprNodeDescUtils.indexOf(expr, distinctExprs) < 0) {
        distinctExprs.add(expr);
      }
    }
  }
  return distinctExprs;
}
// see if there are any distinct expressions
private boolean distinctExprsExists(QB qb) {
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> clauses = new TreeSet<String>();
  clauses.addAll(qbp.getClauseNames());
  for (String dest : clauses) {
    // Any clause with at least one DISTINCT aggregate makes this true.
    if (!qbp.getDistinctFuncExprsForClause(dest).isEmpty()) {
      return true;
    }
  }
  return false;
}
/**
 * Generates the operator plan for the body of a query block (WHERE, GROUP
 * BY, aggregations, SELECT, LIMIT, file sinks), covering all insert
 * destinations. Chooses between the common-distinct optimization, shared
 * single-reducer group-bys, and independent per-destination plans.
 */
@SuppressWarnings("nls")
private Operator genBodyPlan(QB qb, Operator input, Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
  Map<String, Operator<? extends OperatorDesc>> inputs = createInputForDests(qb, input, ks);
  // For multi-group by with the same distinct, we ignore all user hints
  // currently. It doesnt matter whether he has asked to do
  // map-side aggregation or not. Map side aggregation is turned off
  List<ASTNode> commonDistinctExprs = getCommonDistinctExprs(qb, input);
  // Consider a query like:
  //
  // from src
  // insert overwrite table dest1 select col1, count(distinct colx) group by col1
  // insert overwrite table dest2 select col2, count(distinct colx) group by col2;
  //
  // With HIVE_OPTIMIZE_MULTI_GROUPBY_COMMON_DISTINCTS set to true, first we spray by the distinct
  // value (colx), and then perform the 2 groups bys. This makes sense if map-side aggregation is
  // turned off. However, with maps-side aggregation, it might be useful in some cases to treat
  // the 2 inserts independently, thereby performing the query above in 2MR jobs instead of 3
  // (due to spraying by distinct key first).
  boolean optimizeMultiGroupBy = commonDistinctExprs != null &&
      conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_MULTI_GROUPBY_COMMON_DISTINCTS);
  Operator curr = input;
  // if there is a single distinct, optimize that. Spray initially by the
  // distinct key,
  // no computation at the mapper. Have multiple group by operators at the
  // reducer - and then
  // proceed
  if (optimizeMultiGroupBy) {
    curr = createCommonReduceSink(qb, input);
    RowResolver currRR = opParseCtx.get(curr).getRowResolver();
    // create a forward operator
    input = putOpInsertMap(OperatorFactory.getAndMakeChild(new ForwardDesc(),
        new RowSchema(currRR.getColumnInfos()), curr), currRR);
    // Each destination gets its own group-by/select/limit/sink chain off the
    // shared forward operator.
    for (String dest : ks) {
      curr = input;
      curr = genGroupByPlan2MRMultiGroupBy(dest, qb, curr);
      curr = genSelectPlan(dest, qb, curr);
      Integer limit = qbp.getDestLimit(dest);
      if (limit != null) {
        curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(), true);
        qb.getParseInfo().setOuterQueryLimit(limit.intValue());
      }
      curr = genFileSinkPlan(dest, qb, curr);
    }
  } else {
    List<List<String>> commonGroupByDestGroups = null;
    // If we can put multiple group bys in a single reducer, determine suitable groups of
    // expressions, otherwise treat all the expressions as a single group
    if (conf.getBoolVar(HiveConf.ConfVars.HIVEMULTIGROUPBYSINGLEREDUCER)) {
      try {
        commonGroupByDestGroups = getCommonGroupByDestGroups(qb, inputs);
      } catch (SemanticException e) {
        LOG.error("Failed to group clauses by common spray keys.", e);
      }
    }
    // Fallback: treat all destinations as one group (no sharing).
    if (commonGroupByDestGroups == null) {
      commonGroupByDestGroups = new ArrayList<List<String>>();
      commonGroupByDestGroups.add(new ArrayList<String>(ks));
    }
    if (!commonGroupByDestGroups.isEmpty()) {
      // Iterate over each group of subqueries with the same group by/distinct keys
      for (List<String> commonGroupByDestGroup : commonGroupByDestGroups) {
        if (commonGroupByDestGroup.isEmpty()) {
          continue;
        }
        String firstDest = commonGroupByDestGroup.get(0);
        input = inputs.get(firstDest);
        // Constructs a standard group by plan if:
        // There is no other subquery with the same group by/distinct keys or
        // (There are no aggregations in a representative query for the group and
        // There is no group by in that representative query) or
        // The data is skewed or
        // The conf variable used to control combining group bys into a single reducer is false
        if (commonGroupByDestGroup.size() == 1 ||
            (qbp.getAggregationExprsForClause(firstDest).size() == 0 &&
            getGroupByForClause(qbp, firstDest).size() == 0) ||
            conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW) ||
            !conf.getBoolVar(HiveConf.ConfVars.HIVEMULTIGROUPBYSINGLEREDUCER)) {
          // Go over all the destination tables
          for (String dest : commonGroupByDestGroup) {
            curr = inputs.get(dest);
            if (qbp.getWhrForClause(dest) != null) {
              ASTNode whereExpr = qb.getParseInfo().getWhrForClause(dest);
              curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, curr, aliasToOpInfo, false);
            }
            if (qbp.getAggregationExprsForClause(dest).size() != 0
                || getGroupByForClause(qbp, dest).size() > 0) {
              // multiple distincts is not supported with skew in data
              if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW) &&
                  qbp.getDistinctFuncExprsForClause(dest).size() > 1) {
                throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.
                    getMsg());
              }
              // insert a select operator here used by the ColumnPruner to reduce
              // the data to shuffle
              curr = insertSelectAllPlanForGroupBy(curr);
              // Pick the group-by strategy from map-side-aggregation and
              // skew settings.
              if (conf.getBoolVar(HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE)) {
                if (!conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
                  curr = genGroupByPlanMapAggrNoSkew(dest, qb, curr);
                } else {
                  curr = genGroupByPlanMapAggr2MR(dest, qb, curr);
                }
              } else if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
                curr = genGroupByPlan2MR(dest, qb, curr);
              } else {
                curr = genGroupByPlan1MR(dest, qb, curr);
              }
            }
            curr = genPostGroupByBodyPlan(curr, dest, qb, aliasToOpInfo);
          }
        } else {
          // All clauses in this group share one reducer's group-by.
          curr = genGroupByPlan1ReduceMultiGBY(commonGroupByDestGroup, qb, input, aliasToOpInfo);
        }
      }
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Body Plan for Query Block " + qb.getId());
  }
  return curr;
}
/**
 * Builds the per-destination input operator map: each destination clause gets
 * the shared input operator, extended with that destination's lateral view
 * chain when one is present.
 *
 * @param qb the query block being planned
 * @param input the common input operator for all destinations
 * @param dests the destination clause names
 * @return map from destination name to its (possibly lateral-view-extended) input
 * @throws SemanticException if lateral view plan generation fails
 */
private Map<String, Operator<? extends OperatorDesc>> createInputForDests(QB qb,
    Operator<? extends OperatorDesc> input, Set<String> dests) throws SemanticException {
  Map<String, Operator<? extends OperatorDesc>> destToInput =
      new HashMap<String, Operator<? extends OperatorDesc>>();
  for (String dest : dests) {
    destToInput.put(dest, genLateralViewPlanForDest(dest, qb, input));
  }
  return destToInput;
}
/**
 * Completes the plan for one destination clause after its group-by (if any)
 * has been generated: HAVING, windowing, SELECT, a reduce sink for
 * cluster/distribute/order/sort-by, LIMIT, and — for top-level queries —
 * conversion operators and the file sink.
 *
 * @param curr the current top operator for this destination
 * @param dest the destination clause name
 * @param qb the query block being planned
 * @param aliasToOpInfo map from table alias to its operator (used by HAVING)
 * @return the new top operator for this destination
 * @throws SemanticException if HAVING is used without GROUP BY, or any
 *           downstream plan generation fails
 */
private Operator genPostGroupByBodyPlan(Operator curr, String dest, QB qb,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  // Insert HAVING plan here
  if (qbp.getHavingForClause(dest) != null) {
    if (getGroupByForClause(qbp, dest).size() == 0) {
      throw new SemanticException("HAVING specified without GROUP BY");
    }
    curr = genHavingPlan(dest, qb, curr, aliasToOpInfo);
  }
  // Windowing (OVER clauses) is evaluated after HAVING and before SELECT.
  if(queryProperties.hasWindowing() && qb.getWindowingSpec(dest) != null) {
    curr = genWindowingPlan(qb.getWindowingSpec(dest), curr);
  }
  curr = genSelectPlan(dest, qb, curr);
  Integer limit = qbp.getDestLimit(dest);
  // Expressions are not supported currently without a alias.
  // Reduce sink is needed if the query contains a cluster by, distribute by,
  // order by or a sort by clause.
  boolean genReduceSink = false;
  // Currently, expressions are not allowed in cluster by, distribute by,
  // order by or a sort by clause. For each of the above clause types, check
  // if the clause contains any expression.
  if (qbp.getClusterByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (qbp.getDistributeByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (qbp.getOrderByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (qbp.getSortByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (genReduceSink) {
    int numReducers = -1;
    // Use only 1 reducer if order by is present
    if (qbp.getOrderByForClause(dest) != null) {
      numReducers = 1;
    }
    curr = genReduceSinkPlan(dest, qb, curr, numReducers);
  }
  if (qbp.getIsSubQ()) {
    if (limit != null) {
      // In case of order by, only 1 reducer is used, so no need of
      // another shuffle
      curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(), qbp
          .getOrderByForClause(dest) != null ? false : true);
    }
  } else {
    curr = genConversionOps(dest, qb, curr);
    // exact limit can be taken care of by the fetch operator
    if (limit != null) {
      boolean extraMRStep = true;
      // No extra MR step is needed when a single reducer already produces an
      // exact (order by) result, or when a simple fetch-style query can have
      // its limit applied directly by the fetch operator.
      if (qbp.getOrderByForClause(dest) != null ||
          qb.getIsQuery() && qbp.getClusterByForClause(dest) == null &&
          qbp.getSortByForClause(dest) == null) {
        extraMRStep = false;
      }
      curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(),
          extraMRStep);
      qb.getParseInfo().setOuterQueryLimit(limit.intValue());
    }
    // CREATE VIEW only records the plan; it never writes query output.
    if (!SessionState.get().getHiveOperation().equals(HiveOperation.CREATEVIEW)) {
      curr = genFileSinkPlan(dest, qb, curr);
    }
  }
  // change curr ops row resolver's tab aliases to query alias if it
  // exists
  if (qb.getParseInfo().getAlias() != null) {
    RowResolver rr = opParseCtx.get(curr).getRowResolver();
    RowResolver newRR = new RowResolver();
    String alias = qb.getParseInfo().getAlias();
    for (ColumnInfo colInfo : rr.getColumnInfos()) {
      String name = colInfo.getInternalName();
      String[] tmp = rr.reverseLookup(name);
      if ("".equals(tmp[0]) || tmp[1] == null) {
        // ast expression is not a valid column name for table
        tmp[1] = colInfo.getInternalName();
      }
      newRR.put(alias, tmp[1], colInfo);
    }
    opParseCtx.get(curr).setRowResolver(newRR);
  }
  return curr;
}
/**
 * Generates the plan for a UNION of two subqueries. Validates that both sides
 * have matching schemas (same column count, names and union-compatible types),
 * inserts conversion selects where a column needs widening, and then either
 * merges with an existing child UnionOperator or creates a new one with both
 * sides as parents.
 *
 * @param unionalias alias of the union result
 * @param leftalias alias of the left subquery
 * @param leftOp top operator of the left subquery's plan
 * @param rightalias alias of the right subquery
 * @param rightOp top operator of the right subquery's plan
 * @return the union operator, registered in the operator-to-parse-context map
 * @throws SemanticException if the two sides' schemas do not match
 */
@SuppressWarnings("nls")
private Operator genUnionPlan(String unionalias, String leftalias,
    Operator leftOp, String rightalias, Operator rightOp)
    throws SemanticException {
  // Currently, the unions are not merged - each union has only 2 parents. So,
  // a n-way union will lead to (n-1) union operators.
  // This can be easily merged into 1 union
  RowResolver leftRR = opParseCtx.get(leftOp).getRowResolver();
  RowResolver rightRR = opParseCtx.get(rightOp).getRowResolver();
  HashMap<String, ColumnInfo> leftmap = leftRR.getFieldMap(leftalias);
  HashMap<String, ColumnInfo> rightmap = rightRR.getFieldMap(rightalias);
  // make sure the schemas of both sides are the same
  ASTNode tabref = qb.getAliases().isEmpty() ? null :
      qb.getParseInfo().getSrcForAlias(qb.getAliases().get(0));
  if (leftmap.size() != rightmap.size()) {
    throw new SemanticException("Schema of both sides of union should match.");
  }
  for (Map.Entry<String, ColumnInfo> lEntry : leftmap.entrySet()) {
    String field = lEntry.getKey();
    ColumnInfo lInfo = lEntry.getValue();
    ColumnInfo rInfo = rightmap.get(field);
    if (rInfo == null) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match. " + rightalias
              + " does not have the field " + field));
    }
    if (lInfo == null) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match. " + leftalias
              + " does not have the field " + field));
    }
    if (!lInfo.getInternalName().equals(rInfo.getInternalName())) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match: field " + field + ":"
              + " appears on the left side of the UNION at column position: " +
              getPositionFromInternalName(lInfo.getInternalName())
              + ", and on the right side of the UNION at column position: " +
              getPositionFromInternalName(rInfo.getInternalName())
              + ". Column positions should match for a UNION"));
    }
    // try widening conversion, otherwise fail union
    TypeInfo commonTypeInfo = FunctionRegistry.getCommonClassForUnionAll(lInfo.getType(),
        rInfo.getType());
    if (commonTypeInfo == null) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match: Column " + field
              + " is of type " + lInfo.getType().getTypeName()
              + " on first table and type " + rInfo.getType().getTypeName()
              + " on second table"));
    }
  }
  // construct the forward operator
  // The output row resolver uses the widened (common) type for each column.
  RowResolver unionoutRR = new RowResolver();
  for (Map.Entry<String, ColumnInfo> lEntry : leftmap.entrySet()) {
    String field = lEntry.getKey();
    ColumnInfo lInfo = lEntry.getValue();
    ColumnInfo rInfo = rightmap.get(field);
    ColumnInfo unionColInfo = new ColumnInfo(lInfo);
    unionColInfo.setType(FunctionRegistry.getCommonClassForUnionAll(lInfo.getType(),
        rInfo.getType()));
    unionoutRR.put(unionalias, field, unionColInfo);
  }
  // Insert cast-selects where an input's column types differ from the union's.
  if (!(leftOp instanceof UnionOperator)) {
    leftOp = genInputSelectForUnion(leftOp, leftmap, leftalias, unionoutRR, unionalias);
  }
  if (!(rightOp instanceof UnionOperator)) {
    rightOp = genInputSelectForUnion(rightOp, rightmap, rightalias, unionoutRR, unionalias);
  }
  // If one of the children is a union, merge with it
  // else create a new one
  if ((leftOp instanceof UnionOperator) || (rightOp instanceof UnionOperator)) {
    if (leftOp instanceof UnionOperator) {
      // make left a child of right
      List<Operator<? extends OperatorDesc>> child =
          new ArrayList<Operator<? extends OperatorDesc>>();
      child.add(leftOp);
      rightOp.setChildOperators(child);
      List<Operator<? extends OperatorDesc>> parent = leftOp
          .getParentOperators();
      parent.add(rightOp);
      // The merged union now has one more input feeding it.
      UnionDesc uDesc = ((UnionOperator) leftOp).getConf();
      uDesc.setNumInputs(uDesc.getNumInputs() + 1);
      return putOpInsertMap(leftOp, unionoutRR);
    } else {
      // make right a child of left
      List<Operator<? extends OperatorDesc>> child =
          new ArrayList<Operator<? extends OperatorDesc>>();
      child.add(rightOp);
      leftOp.setChildOperators(child);
      List<Operator<? extends OperatorDesc>> parent = rightOp
          .getParentOperators();
      parent.add(leftOp);
      UnionDesc uDesc = ((UnionOperator) rightOp).getConf();
      uDesc.setNumInputs(uDesc.getNumInputs() + 1);
      return putOpInsertMap(rightOp, unionoutRR);
    }
  }
  // Create a new union operator
  Operator<? extends OperatorDesc> unionforward = OperatorFactory
      .getAndMakeChild(new UnionDesc(), new RowSchema(unionoutRR
          .getColumnInfos()));
  // set union operator as child of each of leftOp and rightOp
  List<Operator<? extends OperatorDesc>> child =
      new ArrayList<Operator<? extends OperatorDesc>>();
  child.add(unionforward);
  rightOp.setChildOperators(child);
  child = new ArrayList<Operator<? extends OperatorDesc>>();
  child.add(unionforward);
  leftOp.setChildOperators(child);
  List<Operator<? extends OperatorDesc>> parent =
      new ArrayList<Operator<? extends OperatorDesc>>();
  parent.add(leftOp);
  parent.add(rightOp);
  unionforward.setParentOperators(parent);
  // create operator info list to return
  return putOpInsertMap(unionforward, unionoutRR);
}
/**
 * Generates a select operator that can sit between an original union input and
 * the union operator itself. The select casts any column whose type differs
 * from the corresponding union output column; all other columns pass through
 * unchanged. The new operator's only parent is the original input operator,
 * and its only child is the union. If no column needs a cast, the original
 * operator is returned unchanged and no select is added.
 *
 * @param origInputOp
 *          The original input operator to the union.
 * @param origInputFieldMap
 *          A map from field name to ColumnInfo for the original input operator.
 * @param origInputAlias
 *          The alias associated with the original input operator.
 * @param unionoutRR
 *          The union's output row resolver.
 * @param unionalias
 *          The alias of the union.
 * @return the (possibly new) input operator for the union
 * @throws SemanticException
 */
private Operator<? extends OperatorDesc> genInputSelectForUnion(
    Operator<? extends OperatorDesc> origInputOp, Map<String, ColumnInfo> origInputFieldMap,
    String origInputAlias, RowResolver unionoutRR, String unionalias)
    throws SemanticException {
  List<ExprNodeDesc> selectExprs = new ArrayList<ExprNodeDesc>();
  boolean castNeeded = false;
  for (Map.Entry<String, ColumnInfo> unionEntry : unionoutRR.getFieldMap(unionalias).entrySet()) {
    ColumnInfo inputCol = origInputFieldMap.get(unionEntry.getKey());
    ExprNodeDesc expr = new ExprNodeColumnDesc(inputCol.getType(), inputCol.getInternalName(),
        inputCol.getTabAlias(), inputCol.getIsVirtualCol(), inputCol.isSkewedCol());
    TypeInfo unionColType = unionEntry.getValue().getType();
    if (!inputCol.getType().equals(unionColType)) {
      // Type mismatch: wrap the column reference in a conversion cast.
      castNeeded = true;
      expr = ParseUtils.createConversionCast(expr, (PrimitiveTypeInfo) unionColType);
    }
    selectExprs.add(expr);
  }
  // If none of the columns need to be cast, skip the extra select operator.
  if (!castNeeded) {
    return origInputOp;
  }
  RowResolver selectRR = new RowResolver();
  Map<String, ExprNodeDesc> exprMap = new HashMap<String, ExprNodeDesc>();
  List<String> outputNames = new ArrayList<String>();
  for (int pos = 0; pos < selectExprs.size(); pos++) {
    String internalName = getColumnInternalName(pos);
    selectRR.put(origInputAlias, internalName,
        new ColumnInfo(internalName, selectExprs.get(pos).getTypeInfo(), "", false));
    outputNames.add(internalName);
    exprMap.put(internalName, selectExprs.get(pos));
  }
  Operator<SelectDesc> selectOp = OperatorFactory.getAndMakeChild(
      new SelectDesc(selectExprs, outputNames), new RowSchema(selectRR.getColumnInfos()),
      exprMap, origInputOp);
  return putOpInsertMap(selectOp, selectRR);
}
/**
 * Generates the sampling predicate from the TABLESAMPLE clause information.
 * This function uses the bucket column list to decide the expression inputs
 * to the predicate hash function in case useBucketCols is set to true,
 * otherwise the expression list stored in the TableSample is used. The bucket
 * columns of the table are used to generate this predicate in case no
 * expressions are provided on the TABLESAMPLE clause and the table has
 * clustering columns defined in its metadata. The predicate created has the
 * following structure:
 *
 * ((hash(expressions) & Integer.MAX_VALUE) % denominator) == numerator
 *
 * @param ts
 *          TABLESAMPLE clause information
 * @param bucketCols
 *          The clustering columns of the table
 * @param useBucketCols
 *          Flag to indicate whether the bucketCols should be used as input to
 *          the hash function
 * @param alias
 *          The alias used for the table in the row resolver
 * @param rwsch
 *          The row resolver used to resolve column references
 * @param qbm
 *          The metadata information for the query block which is used to
 *          resolve unaliased columns
 * @param planExpr
 *          The plan tree for the expression. If the user specified this, the
 *          parse expressions are not used
 * @return exprNodeDesc
 * @exception SemanticException
 */
private ExprNodeDesc genSamplePredicate(TableSample ts,
    List<String> bucketCols, boolean useBucketCols, String alias,
    RowResolver rwsch, QBMetaData qbm, ExprNodeDesc planExpr)
    throws SemanticException {
  // Numerator is 1-based in the clause but the hash remainder is 0-based.
  ExprNodeDesc numeratorExpr = new ExprNodeConstantDesc(
      TypeInfoFactory.intTypeInfo, Integer.valueOf(ts.getNumerator() - 1));
  ExprNodeDesc denominatorExpr = new ExprNodeConstantDesc(
      TypeInfoFactory.intTypeInfo, Integer.valueOf(ts.getDenominator()));
  ExprNodeDesc intMaxExpr = new ExprNodeConstantDesc(
      TypeInfoFactory.intTypeInfo, Integer.valueOf(Integer.MAX_VALUE));
  // Hash-function inputs: an explicit plan expression wins, then the table's
  // bucket columns, then the expressions written in the TABLESAMPLE clause.
  ArrayList<ExprNodeDesc> args = new ArrayList<ExprNodeDesc>();
  if (planExpr != null) {
    args.add(planExpr);
  } else if (useBucketCols) {
    for (String col : bucketCols) {
      ColumnInfo ci = rwsch.get(alias, col);
      // TODO: change type to the one in the table schema
      args.add(new ExprNodeColumnDesc(ci.getType(), ci.getInternalName(), ci
          .getTabAlias(), ci.getIsVirtualCol()));
    }
  } else {
    for (ASTNode expr : ts.getExprs()) {
      args.add(genExprNodeDesc(expr, rwsch));
    }
  }
  // Assemble ((hash(args) & Integer.MAX_VALUE) % denominator) == numerator.
  ExprNodeDesc equalsExpr = null;
  {
    ExprNodeDesc hashfnExpr = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.intTypeInfo, new GenericUDFHash(), args);
    assert (hashfnExpr != null);
    LOG.info("hashfnExpr = " + hashfnExpr);
    // & Integer.MAX_VALUE clears the sign bit so the modulus is non-negative.
    ExprNodeDesc andExpr = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("&", hashfnExpr, intMaxExpr);
    assert (andExpr != null);
    LOG.info("andExpr = " + andExpr);
    ExprNodeDesc modExpr = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("%", andExpr, denominatorExpr);
    assert (modExpr != null);
    LOG.info("modExpr = " + modExpr);
    LOG.info("numeratorExpr = " + numeratorExpr);
    equalsExpr = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("==", modExpr, numeratorExpr);
    LOG.info("equalsExpr = " + equalsExpr);
    assert (equalsExpr != null);
  }
  return equalsExpr;
}
/**
 * Builds the lower-cased unique id for a table alias: the alias itself, or
 * "qbId:alias" when the query block has an id.
 */
private String getAliasId(String alias, QB qb) {
  String qbId = qb.getId();
  String rawId = (qbId == null) ? alias : qbId + ":" + alias;
  return rawId.toLowerCase();
}
/**
 * Generates the table-scan plan for a single source table alias: creates (or
 * reuses) the TableScanOperator, registers regular, partition and virtual
 * columns in the row resolver, and layers a sampling filter on top when the
 * query uses TABLESAMPLE or test-mode sampling is in effect.
 *
 * @param alias the table alias in the query
 * @param qb the query block being planned
 * @return the top operator for this table (table scan, possibly with a
 *         sampling filter on top)
 * @throws SemanticException on invalid sampling specs or metadata errors
 */
@SuppressWarnings("nls")
private Operator genTablePlan(String alias, QB qb) throws SemanticException {
  String alias_id = getAliasId(alias, qb);
  Table tab = qb.getMetaData().getSrcForAlias(alias);
  RowResolver rwsch;
  // is the table already present
  Operator<? extends OperatorDesc> top = topOps.get(alias_id);
  Operator<? extends OperatorDesc> dummySel = topSelOps.get(alias_id);
  if (dummySel != null) {
    top = dummySel;
  }
  if (top == null) {
    rwsch = new RowResolver();
    try {
      // Derive the column list from the table's deserializer.
      StructObjectInspector rowObjectInspector = (StructObjectInspector) tab
          .getDeserializer().getObjectInspector();
      List<? extends StructField> fields = rowObjectInspector
          .getAllStructFieldRefs();
      for (int i = 0; i < fields.size(); i++) {
        /**
         * if the column is a skewed column, use ColumnInfo accordingly
         */
        ColumnInfo colInfo = new ColumnInfo(fields.get(i).getFieldName(),
            TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
                .getFieldObjectInspector()), alias, false);
        colInfo.setSkewedCol((isSkewedCol(alias, qb, fields.get(i)
            .getFieldName())) ? true : false);
        rwsch.put(alias, fields.get(i).getFieldName(), colInfo);
      }
    } catch (SerDeException e) {
      throw new RuntimeException(e);
    }
    // Hack!! - refactor once the metadata APIs with types are ready
    // Finally add the partitioning columns
    for (FieldSchema part_col : tab.getPartCols()) {
      LOG.trace("Adding partition col: " + part_col);
      rwsch.put(alias, part_col.getName(), new ColumnInfo(part_col.getName(),
          TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true));
    }
    // put all virtual columns in RowResolver.
    Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
    // use a list for easy customization
    List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
    while (vcs.hasNext()) {
      VirtualColumn vc = vcs.next();
      rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(),
          vc.getTypeInfo(), alias, true, vc.getIsHidden()));
      vcList.add(vc);
    }
    // Create the root of the operator tree
    TableScanDesc tsDesc = new TableScanDesc(alias, vcList);
    setupStats(tsDesc, qb.getParseInfo(), tab, alias, rwsch);
    // Row-count based split sampling, if requested for this alias.
    SplitSample sample = nameToSplitSample.get(alias_id);
    if (sample != null && sample.getRowCount() != null) {
      tsDesc.setRowLimit(sample.getRowCount());
      nameToSplitSample.remove(alias_id);
    }
    top = putOpInsertMap(OperatorFactory.get(tsDesc,
        new RowSchema(rwsch.getColumnInfos())), rwsch);
    // Add this to the list of top operators - we always start from a table
    // scan
    topOps.put(alias_id, top);
    // Add a mapping from the table scan operator to Table
    topToTable.put((TableScanOperator) top, tab);
    Map<String, String> props = qb.getTabPropsForAlias(alias);
    if (props != null) {
      topToTableProps.put((TableScanOperator) top, props);
    }
  } else {
    // Reuse the existing scan; detach its children so it can be re-wired.
    rwsch = opParseCtx.get(top).getRowResolver();
    top.setChildOperators(null);
  }
  // check if this table is sampled and needs more than input pruning
  Operator<? extends OperatorDesc> tableOp = top;
  TableSample ts = qb.getParseInfo().getTabSample(alias);
  if (ts != null) {
    int num = ts.getNumerator();
    int den = ts.getDenominator();
    ArrayList<ASTNode> sampleExprs = ts.getExprs();
    // TODO: Do the type checking of the expressions
    List<String> tabBucketCols = tab.getBucketCols();
    int numBuckets = tab.getNumBuckets();
    // If there are no sample cols and no bucket cols then throw an error
    if (tabBucketCols.size() == 0 && sampleExprs.size() == 0) {
      throw new SemanticException(ErrorMsg.NON_BUCKETED_TABLE.getMsg() + " "
          + tab.getTableName());
    }
    if (num > den) {
      throw new SemanticException(
          ErrorMsg.BUCKETED_NUMERATOR_BIGGER_DENOMINATOR.getMsg() + " "
              + tab.getTableName());
    }
    // check if a predicate is needed
    // predicate is needed if either input pruning is not enough
    // or if input pruning is not possible
    // check if the sample columns are the same as the table bucket columns
    boolean colsEqual = true;
    if ((sampleExprs.size() != tabBucketCols.size())
        && (sampleExprs.size() != 0)) {
      colsEqual = false;
    }
    for (int i = 0; i < sampleExprs.size() && colsEqual; i++) {
      boolean colFound = false;
      for (int j = 0; j < tabBucketCols.size() && !colFound; j++) {
        if (sampleExprs.get(i).getToken().getType() != HiveParser.TOK_TABLE_OR_COL) {
          break;
        }
        if (((ASTNode) sampleExprs.get(i).getChild(0)).getText()
            .equalsIgnoreCase(tabBucketCols.get(j))) {
          colFound = true;
        }
      }
      colsEqual = (colsEqual && colFound);
    }
    // Check if input can be pruned
    ts.setInputPruning((sampleExprs == null || sampleExprs.size() == 0 || colsEqual));
    // check if input pruning is enough
    if ((sampleExprs == null || sampleExprs.size() == 0 || colsEqual)
        && (num == den || (den % numBuckets == 0 || numBuckets % den == 0))) {
      // input pruning is enough; add the filter for the optimizer to use it
      // later
      LOG.info("No need for sample filter");
      ExprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols,
          colsEqual, alias, rwsch, qb.getMetaData(), null);
      tableOp = OperatorFactory.getAndMakeChild(new FilterDesc(
          samplePredicate, true, new sampleDesc(ts.getNumerator(), ts
              .getDenominator(), tabBucketCols, true)),
          new RowSchema(rwsch.getColumnInfos()), top);
    } else {
      // need to add filter
      // create tableOp to be filterDesc and set as child to 'top'
      LOG.info("Need sample filter");
      ExprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols,
          colsEqual, alias, rwsch, qb.getMetaData(), null);
      tableOp = OperatorFactory.getAndMakeChild(new FilterDesc(
          samplePredicate, true),
          new RowSchema(rwsch.getColumnInfos()), top);
    }
  } else {
    // No TABLESAMPLE clause: in test mode, sample automatically unless the
    // table is on the explicit no-sample list.
    boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
    if (testMode) {
      String tabName = tab.getTableName();
      // has the user explicitly asked not to sample this table
      String unSampleTblList = conf
          .getVar(HiveConf.ConfVars.HIVETESTMODENOSAMPLE);
      String[] unSampleTbls = unSampleTblList.split(",");
      boolean unsample = false;
      for (String unSampleTbl : unSampleTbls) {
        if (tabName.equalsIgnoreCase(unSampleTbl)) {
          unsample = true;
        }
      }
      if (!unsample) {
        int numBuckets = tab.getNumBuckets();
        // If the input table is bucketed, choose the first bucket
        if (numBuckets > 0) {
          TableSample tsSample = new TableSample(1, numBuckets);
          tsSample.setInputPruning(true);
          qb.getParseInfo().setTabSample(alias, tsSample);
          ExprNodeDesc samplePred = genSamplePredicate(tsSample, tab
              .getBucketCols(), true, alias, rwsch, qb.getMetaData(), null);
          tableOp = OperatorFactory
              .getAndMakeChild(new FilterDesc(samplePred, true,
                  new sampleDesc(tsSample.getNumerator(), tsSample
                      .getDenominator(), tab.getBucketCols(), true)),
                  new RowSchema(rwsch.getColumnInfos()), top);
          LOG.info("No need for sample filter");
        } else {
          // The table is not bucketed, add a dummy filter :: rand()
          int freq = conf.getIntVar(HiveConf.ConfVars.HIVETESTMODESAMPLEFREQ);
          TableSample tsSample = new TableSample(1, freq);
          tsSample.setInputPruning(false);
          qb.getParseInfo().setTabSample(alias, tsSample);
          LOG.info("Need sample filter");
          ExprNodeDesc randFunc = TypeCheckProcFactory.DefaultExprProcessor
              .getFuncExprNodeDesc("rand", new ExprNodeConstantDesc(Integer
                  .valueOf(460476415)));
          ExprNodeDesc samplePred = genSamplePredicate(tsSample, null, false,
              alias, rwsch, qb.getMetaData(), randFunc);
          tableOp = OperatorFactory.getAndMakeChild(new FilterDesc(
              samplePred, true),
              new RowSchema(rwsch.getColumnInfos()), top);
        }
      }
    }
  }
  Operator output = putOpInsertMap(tableOp, rwsch);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Table Plan for " + alias + " " + tableOp.toString());
  }
  return output;
}
/**
 * Returns whether {@code colName} is declared as a skewed column for the
 * given table alias in this query block. The comparison is case-insensitive.
 *
 * @param alias the table alias to look up skewed columns for
 * @param qb the query block providing the skewed-column metadata
 * @param colName the column name to test
 * @return true if the column is skewed for the alias, false otherwise
 */
private boolean isSkewedCol(String alias, QB qb, String colName) {
  for (String skewedCol : qb.getSkewedColumnNames(alias)) {
    if (skewedCol.equalsIgnoreCase(colName)) {
      // Found a match; no need to scan the remaining skewed columns.
      return true;
    }
  }
  return false;
}
/**
 * Configures statistics gathering on a table scan for ANALYZE commands.
 * For non-ANALYZE queries this only switches stats gathering off; otherwise
 * it enables gathering, registers the stats virtual columns in the row
 * resolver, records partition columns, sets the stats aggregation key
 * prefix, and adds the required WriteEntity outputs (table and any matching
 * partitions) for replication/locking.
 *
 * @param tsDesc the table scan descriptor to configure
 * @param qbp parse info of the query block (ANALYZE detection, table spec)
 * @param tab the table being scanned
 * @param alias the table alias in the row resolver
 * @param rwsch row resolver to receive the stats virtual columns
 * @throws SemanticException if a partitioned table is analyzed without a
 *           partition specification
 */
private void setupStats(TableScanDesc tsDesc, QBParseInfo qbp, Table tab, String alias,
    RowResolver rwsch)
    throws SemanticException {
  if (!qbp.isAnalyzeCommand()) {
    tsDesc.setGatherStats(false);
  } else {
    // Filesystem-based stats need a temp dir for the intermediate files.
    if (HiveConf.getVar(conf, HIVESTATSDBCLASS).equalsIgnoreCase(StatDB.fs.name())) {
      String statsTmpLoc = ctx.getExternalTmpPath(tab.getPath().toUri()).toString();
      LOG.info("Set stats collection dir : " + statsTmpLoc);
      conf.set(StatsSetupConst.STATS_TMP_LOC, statsTmpLoc);
    }
    tsDesc.setGatherStats(true);
    tsDesc.setStatsReliable(conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_RELIABLE));
    tsDesc.setMaxStatsKeyPrefixLength(StatsFactory.getMaxPrefixLength(conf));
    // append additional virtual columns for storing statistics
    Iterator<VirtualColumn> vcs = VirtualColumn.getStatsRegistry(conf).iterator();
    List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
    while (vcs.hasNext()) {
      VirtualColumn vc = vcs.next();
      rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(),
          vc.getTypeInfo(), alias, true, vc.getIsHidden()));
      vcList.add(vc);
    }
    tsDesc.addVirtualCols(vcList);
    String tblName = tab.getTableName();
    tableSpec tblSpec = qbp.getTableSpec(alias);
    Map<String, String> partSpec = tblSpec.getPartSpec();
    if (partSpec != null) {
      List<String> cols = new ArrayList<String>();
      cols.addAll(partSpec.keySet());
      tsDesc.setPartColumns(cols);
    }
    // Theoretically the key prefix could be any unique string shared
    // between TableScanOperator (when publishing) and StatsTask (when aggregating).
    // Here we use
    // db_name.table_name + partitionSec
    // as the prefix for ease of reading during explain and debugging.
    // Currently, partition spec can only be static partition.
    String k = tblName + Path.SEPARATOR;
    tsDesc.setStatsAggPrefix(tab.getDbName()+"."+k);
    // set up WriteEntity for replication
    outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));
    // add WriteEntity for each matching partition
    if (tab.isPartitioned()) {
      if (partSpec == null) {
        throw new SemanticException(ErrorMsg.NEED_PARTITION_SPECIFICATION.getMsg());
      }
      List<Partition> partitions = qbp.getTableSpec().partitions;
      if (partitions != null) {
        for (Partition partn : partitions) {
          // inputs.add(new ReadEntity(partn)); // is this needed at all?
          outputs.add(new WriteEntity(partn, WriteEntity.WriteType.DDL_NO_LOCK));
        }
      }
    }
  }
}
/**
 * Generates the operator plan for a query-block expression: a plain query
 * block (NULLOP) is planned directly, a UNION plans both children and joins
 * them under a union operator; any other opcode yields null.
 */
private Operator genPlan(QBExpr qbexpr) throws SemanticException {
  QBExpr.Opcode opcode = qbexpr.getOpcode();
  if (opcode == QBExpr.Opcode.NULLOP) {
    return genPlan(qbexpr.getQB());
  }
  if (opcode != QBExpr.Opcode.UNION) {
    return null;
  }
  // UNION: plan each side, then combine the two plans.
  Operator leftOps = genPlan(qbexpr.getQBExpr1());
  Operator rightOps = genPlan(qbexpr.getQBExpr2());
  return genUnionPlan(qbexpr.getAlias(), qbexpr.getQBExpr1().getAlias(),
      leftOps, qbexpr.getQBExpr2().getAlias(), rightOps);
}
/**
 * Generates the full operator plan for a query block: plans subqueries and
 * source tables (inserting a dummy table when there is no FROM source),
 * partitioned table functions, lateral views, and joins, then delegates to
 * the body plan (where/group-by/select/sinks).
 *
 * @param qb the query block to plan
 * @return the top operator of the generated plan
 * @throws SemanticException if any part of the plan cannot be generated
 */
@SuppressWarnings("nls")
public Operator genPlan(QB qb) throws SemanticException {
  // First generate all the opInfos for the elements in the from clause
  Map<String, Operator> aliasToOpInfo = new HashMap<String, Operator>();
  // Recurse over the subqueries to fill the subquery part of the plan
  for (String alias : qb.getSubqAliases()) {
    QBExpr qbexpr = qb.getSubqForAlias(alias);
    aliasToOpInfo.put(alias, genPlan(qbexpr));
    qbexpr.setAlias(alias);
  }
  // Recurse over all the source tables
  for (String alias : qb.getTabAliases()) {
    Operator op = genTablePlan(alias, qb);
    aliasToOpInfo.put(alias, op);
  }
  // No FROM sources at all: scan a one-row dummy table instead.
  if (aliasToOpInfo.isEmpty()) {
    qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
    TableScanOperator op = (TableScanOperator) genTablePlan(DUMMY_TABLE, qb);
    op.getConf().setRowLimit(1);
    qb.addAlias(DUMMY_TABLE);
    qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
    aliasToOpInfo.put(DUMMY_TABLE, op);
  }
  Operator srcOpInfo = null;
  Operator lastPTFOp = null;
  if(queryProperties.hasPTF()){
    //After processing subqueries and source tables, process
    // partitioned table functions
    HashMap<ASTNode, PTFInvocationSpec> ptfNodeToSpec = qb.getPTFNodeToSpec();
    if ( ptfNodeToSpec != null ) {
      for(Entry<ASTNode, PTFInvocationSpec> entry : ptfNodeToSpec.entrySet()) {
        ASTNode ast = entry.getKey();
        PTFInvocationSpec spec = entry.getValue();
        String inputAlias = spec.getQueryInputName();
        Operator inOp = aliasToOpInfo.get(inputAlias);
        if ( inOp == null ) {
          throw new SemanticException(generateErrorMessage(ast,
              "Cannot resolve input Operator for PTF invocation"));
        }
        lastPTFOp = genPTFPlan(spec, inOp);
        String ptfAlias = spec.getFunction().getAlias();
        if ( ptfAlias != null ) {
          aliasToOpInfo.put(ptfAlias, lastPTFOp);
        }
      }
    }
  }
  // For all the source tables that have a lateral view, attach the
  // appropriate operators to the TS
  genLateralViewPlans(aliasToOpInfo, qb);
  // process join
  if (qb.getParseInfo().getJoinExpr() != null) {
    ASTNode joinExpr = qb.getParseInfo().getJoinExpr();
    if (joinExpr.getToken().getType() == HiveParser.TOK_UNIQUEJOIN) {
      QBJoinTree joinTree = genUniqueJoinTree(qb, joinExpr, aliasToOpInfo);
      qb.setQbJoinTree(joinTree);
    } else {
      QBJoinTree joinTree = genJoinTree(qb, joinExpr, aliasToOpInfo);
      qb.setQbJoinTree(joinTree);
      /*
       * if there is only one destination in Query try to push where predicates
       * as Join conditions
       */
      Set<String> dests = qb.getParseInfo().getClauseNames();
      if ( dests.size() == 1 ) {
        String dest = dests.iterator().next();
        ASTNode whereClause = qb.getParseInfo().getWhrForClause(dest);
        if ( whereClause != null ) {
          extractJoinCondsFromWhereClause(joinTree, qb, dest,
              (ASTNode) whereClause.getChild(0),
              aliasToOpInfo );
        }
      }
      if (!disableJoinMerge)
        mergeJoinTree(qb);
    }
    // if any filters are present in the join tree, push them on top of the
    // table
    pushJoinFilters(qb, qb.getQbJoinTree(), aliasToOpInfo);
    srcOpInfo = genJoinPlan(qb, aliasToOpInfo);
  } else {
    // Now if there are more than 1 sources then we have a join case
    // later we can extend this to the union all case as well
    srcOpInfo = aliasToOpInfo.values().iterator().next();
    // with ptfs, there maybe more (note for PTFChains:
    // 1 ptf invocation may entail multiple PTF operators)
    srcOpInfo = lastPTFOp != null ? lastPTFOp : srcOpInfo;
  }
  Operator bodyOpInfo = genBodyPlan(qb, srcOpInfo, aliasToOpInfo);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Plan for Query Block " + qb.getId());
  }
  this.qb = qb;
  return bodyOpInfo;
}
/**
 * Constructs the one-row dummy table used when a query has no FROM source.
 * The table points at a scratch-dir dummy file and uses null-row input/serde
 * classes so scanning it produces a single empty row.
 *
 * @return the in-memory dummy Table descriptor
 * @throws SemanticException if the backing dummy file cannot be created
 */
private Table getDummyTable() throws SemanticException {
  Path dummyPath = createDummyFile();
  Table dummy = new Table(DUMMY_DATABASE, DUMMY_TABLE);
  dummy.getTTable().getSd().setLocation(dummyPath.toString());
  dummy.getTTable().getSd().getSerdeInfo()
      .setSerializationLib(NullStructSerDe.class.getName());
  dummy.setInputFormatClass(NullRowsInputFormat.class);
  dummy.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  return dummy;
}
/**
 * Creates (once) a one-byte dummy file under the MR scratch dir so the dummy
 * table's location is non-empty and is not removed by CombineHiveInputFormat,
 * etc.
 *
 * @return the directory path containing the dummy file
 * @throws SemanticException wrapping any underlying IOException
 */
private Path createDummyFile() throws SemanticException {
  Path dirPath = new Path(ctx.getMRScratchDir(), "dummy_path");
  Path filePath = new Path(dirPath, "dummy_file");
  FSDataOutputStream stream = null;
  try {
    FileSystem fs = filePath.getFileSystem(conf);
    // Reuse the file if an earlier compilation already created it.
    if (!fs.exists(filePath)) {
      stream = fs.create(filePath);
      stream.write(1);
      stream.close();
    }
  } catch (IOException e) {
    throw new SemanticException(e);
  } finally {
    IOUtils.closeStream(stream);
  }
  return dirPath;
}
/**
 * Generates the operator DAG needed to implement lateral views and attaches
 * it to the TS operator.
 *
 * @param aliasToOpInfo
 *          A mapping from a table alias to the TS operator. This function
 *          replaces the operator mapping as necessary
 * @param qb
 * @throws SemanticException
 */
void genLateralViewPlans(Map<String, Operator> aliasToOpInfo, QB qb)
    throws SemanticException {
  Map<String, ArrayList<ASTNode>> aliasToLateralViews = qb.getParseInfo()
      .getAliasToLateralViews();
  for (Entry<String, Operator> e : aliasToOpInfo.entrySet()) {
    String alias = e.getKey();
    // See if the alias has a lateral view. If so, chain the lateral view
    // operator on
    ArrayList<ASTNode> lateralViews = aliasToLateralViews.get(alias);
    if (lateralViews != null) {
      Operator op = e.getValue();
      for (ASTNode lateralViewTree : aliasToLateralViews.get(alias)) {
        // There are 2 paths from the TS operator (or a previous LVJ operator)
        // to the same LateralViewJoinOperator.
        // TS -> SelectOperator(*) -> LateralViewJoinOperator
        // TS -> SelectOperator (gets cols for UDTF) -> UDTFOperator0
        // -> LateralViewJoinOperator
        //
        // Each additional lateral view chains onto the previous LVJ output.
        Operator lateralViewJoin = genLateralViewPlan(qb, op, lateralViewTree);
        op = lateralViewJoin;
      }
      // Replace the mapping so downstream plan generation sees the LVJ chain.
      e.setValue(op);
    }
  }
}
// Chains a lateral-view sub-plan onto op when the destination clause has one;
// otherwise returns op unchanged.
private Operator genLateralViewPlanForDest(String dest, QB qb, Operator op)
    throws SemanticException {
  ASTNode lvTree = qb.getParseInfo().getDestToLateralView().get(dest);
  return (lvTree == null) ? op : genLateralViewPlan(qb, op, lvTree);
}
/**
 * Builds the operator sub-DAG implementing a single lateral view on top of
 * {@code op}: a LateralViewForward feeding two branches — a select(*) "all"
 * path and a select+UDTF path — which are merged by a LateralViewJoin.
 *
 * @param qb query block; UDTF aliases discovered here are added to it
 * @param op operator the lateral view is chained onto
 * @param lateralViewTree TOK_LATERAL_VIEW / TOK_LATERAL_VIEW_OUTER node
 * @return the LateralViewJoin operator at the bottom of the new sub-DAG
 * @throws SemanticException if the UDTF select plan cannot be generated
 */
private Operator genLateralViewPlan(QB qb, Operator op, ASTNode lateralViewTree)
    throws SemanticException {
  // The forward operator re-exposes the source row under the same
  // table/column aliases.
  RowResolver lvForwardRR = new RowResolver();
  RowResolver source = opParseCtx.get(op).getRowResolver();
  for (ColumnInfo col : source.getColumnInfos()) {
    String[] tabCol = source.reverseLookup(col.getInternalName());
    lvForwardRR.put(tabCol[0], tabCol[1], col);
  }
  Operator lvForward = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new LateralViewForwardDesc(), new RowSchema(lvForwardRR.getColumnInfos()),
      op), lvForwardRR);
  // The order in which the two paths are added is important. The
  // lateral view join operator depends on having the select operator
  // give it the row first.
  // Get the all path by making a select(*).
  RowResolver allPathRR = opParseCtx.get(lvForward).getRowResolver();
  // Operator allPath = op;
  Operator allPath = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new SelectDesc(true), new RowSchema(allPathRR.getColumnInfos()),
      lvForward), allPathRR);
  // number of columns contributed by the all path (needed by the join desc)
  int allColumns = allPathRR.getColumnInfos().size();
  // Get the UDTF Path: plan the lateral view's SELECT in a blank query block
  // so its aliases can be collected separately.
  QB blankQb = new QB(null, null, false);
  Operator udtfPath = genSelectPlan((ASTNode) lateralViewTree
      .getChild(0), blankQb, lvForward,
      lateralViewTree.getType() == HiveParser.TOK_LATERAL_VIEW_OUTER);
  // add udtf aliases to QB
  for (String udtfAlias : blankQb.getAliases()) {
    qb.addAlias(udtfAlias);
  }
  RowResolver udtfPathRR = opParseCtx.get(udtfPath).getRowResolver();
  // Merge the two into the lateral view join
  // The cols of the merged result will be the combination of both the
  // cols of the UDTF path and the cols of the all path. The internal
  // names have to be changed to avoid conflicts
  RowResolver lateralViewRR = new RowResolver();
  ArrayList<String> outputInternalColNames = new ArrayList<String>();
  // For PPD, we need a column to expression map so that during the walk,
  // the processor knows how to transform the internal col names.
  // Following steps are dependant on the fact that we called
  // LVmerge.. in the above order
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  LVmergeRowResolvers(allPathRR, lateralViewRR, colExprMap, outputInternalColNames);
  LVmergeRowResolvers(udtfPathRR, lateralViewRR, colExprMap, outputInternalColNames);
  Operator lateralViewJoin = putOpInsertMap(OperatorFactory
      .getAndMakeChild(new LateralViewJoinDesc(allColumns, outputInternalColNames),
          new RowSchema(lateralViewRR.getColumnInfos()), allPath,
          udtfPath), lateralViewRR);
  lateralViewJoin.setColumnExprMap(colExprMap);
  return lateralViewJoin;
}
/**
 * A helper function that gets all the columns and respective aliases in the
 * source and puts them into dest. It renames the internal names of the
 * columns based on getColumnInternalName(position).
 *
 * Note that this helper method relies on RowResolver.getColumnInfos()
 * returning the columns in the same order as they will be passed in the
 * operator DAG.
 *
 * @param source row resolver whose columns are copied
 * @param dest row resolver receiving the renamed columns
 * @param colExprMap receives internal-name -> column-expression entries (for PPD)
 * @param outputInternalColNames
 *          a list to which the new internal column names will be added, in
 *          the same order as in the dest row resolver
 */
private void LVmergeRowResolvers(RowResolver source, RowResolver dest,
    Map<String, ExprNodeDesc> colExprMap, ArrayList<String> outputInternalColNames) {
  for (ColumnInfo info : source.getColumnInfos()) {
    // position-based internal name guarantees uniqueness across both merges
    String newInternalName = getColumnInternalName(outputInternalColNames.size());
    outputInternalColNames.add(newInternalName);
    ColumnInfo renamed = new ColumnInfo(newInternalName, info.getType(),
        info.getTabAlias(), info.getIsVirtualCol(), info.isHiddenVirtualCol());
    String[] aliases = source.reverseLookup(info.getInternalName());
    dest.put(aliases[0], aliases[1], renamed);
    // map the new internal name back to an expression over the old one
    colExprMap.put(newInternalName, new ExprNodeColumnDesc(info.getType(),
        info.getInternalName(), info.getTabAlias(), info.getIsVirtualCol()));
  }
}
// Returns a fresh phase-1 context: destination counter at zero and the
// default "reduce" destination label.
@SuppressWarnings("nls")
public Phase1Ctx initPhase1Ctx() {
  Phase1Ctx phase1 = new Phase1Ctx();
  phase1.nextNum = 0;
  phase1.dest = "reduce";
  return phase1;
}
@Override
public void init() {
  // drop state accumulated by any previous analysis run
  reset();
  // start over with a fresh top-level query block
  this.qb = new QB(null, null, false);
}
/**
 * Main driver of semantic analysis: routes DDL variants (CREATE TABLE /
 * CREATE VIEW) to their analyzers, runs the two-phase query analysis
 * (doPhase1 + metadata fetch), optionally re-plans through the cost-based
 * optimizer, generates the operator plan, runs logical optimization, and
 * finally compiles tasks unless only a logical explain was requested.
 *
 * @param ast the root of the parsed query AST
 * @throws SemanticException on any semantic error in the query
 */
@Override
@SuppressWarnings("nls")
public void analyzeInternal(ASTNode ast) throws SemanticException {
  ASTNode child = ast;
  this.ast = ast;
  viewsExpanded = new ArrayList<String>();
  ctesExpanded = new ArrayList<String>();
  LOG.info("Starting Semantic Analysis");
  // analyze and process the position alias
  processPositionAlias(ast);
  // analyze create table command
  if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE) {
    // if it is not CTAS, we don't need to go further and just return
    if ((child = analyzeCreateTable(ast, qb)) == null) {
      return;
    }
  } else {
    SessionState.get().setCommandType(HiveOperation.QUERY);
  }
  // analyze create view command
  if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW ||
      ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
    child = analyzeCreateView(ast, qb);
    SessionState.get().setCommandType(HiveOperation.CREATEVIEW);
    if (child == null) {
      return;
    }
    viewSelect = child;
    // prevent view from referencing itself
    viewsExpanded.add(SessionState.get().getCurrentDatabase() + "." + createVwDesc.getViewName());
  }
  // continue analyzing from the child ASTNode.
  Phase1Ctx ctx_1 = initPhase1Ctx();
  if (!doPhase1(child, qb, ctx_1)) {
    // if phase1Result false return
    return;
  }
  LOG.info("Completed phase 1 of Semantic Analysis");
  getMetaData(qb);
  LOG.info("Completed getting MetaData in Semantic Analysis");
  // Decide whether CBO applies: only plain queries/explains, not view
  // creation, and only when enabled and the query shape is supported.
  if (runCBO) {
    boolean tokenTypeIsQuery = ast.getToken().getType() == HiveParser.TOK_QUERY
        || ast.getToken().getType() == HiveParser.TOK_EXPLAIN;
    if (!tokenTypeIsQuery || createVwDesc != null
        || !HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)
        || !canHandleQuery()) {
      runCBO = false;
    }
    if (runCBO) {
      disableJoinMerge = true;
    }
  }
  // Save the result schema derived from the sink operator produced
  // by genPlan. This has the correct column names, which clients
  // such as JDBC would prefer instead of the c0, c1 we'll end
  // up with later.
  Operator sinkOp = null;
  if (runCBO) {
    // NOTE(review): reAnalyzeAST is never set to true in this method and the
    // catch block rethrows, so the re-analysis branch in the finally block
    // below appears unreachable — confirm before relying on it.
    boolean reAnalyzeAST = false;
    try {
      // 1. Gen Optimized AST
      ASTNode newAST = new OptiqBasedPlanner().getOptimizedAST();
      // 2. Regen OP plan from optimized AST
      init();
      ctx_1 = initPhase1Ctx();
      if (!doPhase1(newAST, qb, ctx_1)) {
        throw new RuntimeException(
            "Couldn't do phase1 on CBO optimized query plan");
      }
      getMetaData(qb);
      disableJoinMerge = true;
      sinkOp = genPlan(qb);
      /*
       * Use non CBO Result Set Schema so as to preserve user specified names.
       * Hive seems to have bugs with OB/LIMIT in sub queries. // 3. Reset
       * result set schema resultSchema =
       * convertRowSchemaToResultSetSchema(opParseCtx.get(sinkOp)
       * .getRowResolver(), true);
       */
    } catch (Exception e) {
      LOG.warn("CBO failed, skipping CBO. ", e);
      throw new RuntimeException(e);
    } finally {
      // always restore the non-CBO analysis flags
      runCBO = false;
      disableJoinMerge = false;
      if (reAnalyzeAST) {
        init();
        analyzeInternal(ast);
        return;
      }
    }
  } else {
    sinkOp = genPlan(qb);
  }
  // Derive the result schema from the sink operator's row resolver; views
  // never use table-alias-qualified names.
  if (createVwDesc != null)
    resultSchema = convertRowSchemaToViewSchema(opParseCtx.get(sinkOp).getRowResolver());
  else
    resultSchema = convertRowSchemaToResultSetSchema(opParseCtx.get(sinkOp).getRowResolver(),
        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES));
  // Bundle everything the optimizer and task compiler need.
  ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner,
      opToPartList, topOps, topSelOps, opParseCtx, joinContext, smbMapJoinContext,
      topToTable, topToTableProps, fsopToTable,
      loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
      listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
      opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
      opToPartToSkewedPruner, viewAliasToInput,
      reduceSinkOperatorsAddedByEnforceBucketingSorting, queryProperties);
  if (createVwDesc != null) {
    saveViewDefinition();
    // validate the create view statement
    // at this point, the createVwDesc gets all the information for semantic check
    validateCreateView(createVwDesc);
    // Since we're only creating a view (not executing it), we
    // don't need to optimize or translate the plan (and in fact, those
    // procedures can interfere with the view creation). So
    // skip the rest of this method.
    ctx.setResDir(null);
    ctx.setResFile(null);
    try {
      PlanUtils.addInputsForView(pCtx);
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
    return;
  }
  // Generate table access stats if required
  if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_TABLEKEYS) == true) {
    TableAccessAnalyzer tableAccessAnalyzer = new TableAccessAnalyzer(pCtx);
    setTableAccessInfo(tableAccessAnalyzer.analyzeTableAccess());
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Before logical optimization\n" + Operator.toString(pCtx.getTopOps().values()));
  }
  // Run the logical optimizer over the operator tree.
  Optimizer optm = new Optimizer();
  optm.setPctx(pCtx);
  optm.initialize(conf);
  pCtx = optm.optimize();
  // remember the pre-task-compilation fetch task for the scan-limit check
  FetchTask origFetchTask = pCtx.getFetchTask();
  if (LOG.isDebugEnabled()) {
    LOG.debug("After logical optimization\n" + Operator.toString(pCtx.getTopOps().values()));
  }
  // Generate column access stats if required - wait until column pruning takes place
  // during optimization
  if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS) == true) {
    ColumnAccessAnalyzer columnAccessAnalyzer = new ColumnAccessAnalyzer(pCtx);
    setColumnAccessInfo(columnAccessAnalyzer.analyzeColumnAccess());
  }
  if (!ctx.getExplainLogical()) {
    // At this point we have the complete operator tree
    // from which we want to create the map-reduce plan
    TaskCompiler compiler = TaskCompilerFactory.getCompiler(conf, pCtx);
    compiler.init(conf, console, db);
    compiler.compile(pCtx, rootTasks, inputs, outputs);
    fetchTask = pCtx.getFetchTask();
  }
  LOG.info("Completed plan generation");
  if (!ctx.getExplain()) {
    // if desired check we're not going over partition scan limits
    enforceScanLimits(pCtx, origFetchTask);
  }
  return;
}
// Throws when any table scan would read more partitions than the configured
// HIVELIMITTABLESCANPARTITION limit allows (a value of -1 disables the check).
private void enforceScanLimits(ParseContext pCtx, FetchTask fTask)
    throws SemanticException {
  int scanLimit = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVELIMITTABLESCANPARTITION);
  if (scanLimit <= -1) {
    // no per-query partition-scan limit has been set by the user
    return;
  }
  if (fTask != null) {
    // A fetch task at this point means we're not going to launch a job on
    // the cluster; check the fetch work directly.
    boolean overLimit = !fTask.getWork().isNotPartitioned()
        && fTask.getWork().getLimit() == -1
        && scanLimit < fTask.getWork().getPartDir().size();
    if (overLimit) {
      throw new SemanticException(ErrorMsg.PARTITION_SCAN_LIMIT_EXCEEDED, ""
          + fTask.getWork().getPartDir().size(), ""
          + fTask.getWork().getTblDesc().getTableName(), "" + scanLimit);
    }
    return;
  }
  // The partition pruner has already run for all top ops; verify that none
  // of the (non-metadata-only) table scans breaks the limit.
  for (Operator<?> topOp : topOps.values()) {
    if (!(topOp instanceof TableScanOperator)
        || ((TableScanDesc) topOp.getConf()).getIsMetadataOnly()) {
      continue;
    }
    PrunedPartitionList parts = pCtx.getOpToPartList().get((TableScanOperator) topOp);
    if (parts.getPartitions().size() > scanLimit) {
      throw new SemanticException(ErrorMsg.PARTITION_SCAN_LIMIT_EXCEEDED, ""
          + parts.getPartitions().size(), ""
          + parts.getSourceTable().getTableName(), "" + scanLimit);
    }
  }
}
/**
 * Returns the result schema derived from the query's sink operator;
 * populated during {@code analyzeInternal}.
 */
@Override
public List<FieldSchema> getResultSchema() {
  return resultSchema;
}
/**
 * Finalizes {@code createVwDesc} for a CREATE VIEW: records the user's
 * original query text, builds the expanded definition (with column
 * impositions and identifier qualification applied), validates any
 * PARTITIONED ON columns against the derived schema, and stores the final
 * schema and expanded text on the descriptor.
 *
 * @throws SemanticException if the imposed column count or the declared
 *           partition columns do not match the derived schema
 */
private void saveViewDefinition() throws SemanticException {
  // Make a copy of the statement's result schema, since we may
  // modify it below as part of imposing view column names.
  List<FieldSchema> derivedSchema =
      new ArrayList<FieldSchema>(resultSchema);
  ParseUtils.validateColumnNameUniqueness(derivedSchema);
  List<FieldSchema> imposedSchema = createVwDesc.getSchema();
  if (imposedSchema != null) {
    // an explicit column list was given: it must match the query's width
    int explicitColCount = imposedSchema.size();
    int derivedColCount = derivedSchema.size();
    if (explicitColCount != derivedColCount) {
      throw new SemanticException(generateErrorMessage(
          viewSelect,
          ErrorMsg.VIEW_COL_MISMATCH.getMsg()));
    }
  }
  // Preserve the original view definition as specified by the user.
  String originalText = ctx.getTokenRewriteStream().toString(
      viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
  createVwDesc.setViewOriginalText(originalText);
  // Now expand the view definition with extras such as explicit column
  // references; this expanded form is what we'll re-parse when the view is
  // referenced later.
  unparseTranslator.applyTranslations(ctx.getTokenRewriteStream());
  String expandedText = ctx.getTokenRewriteStream().toString(
      viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
  if (imposedSchema != null) {
    // Merge the names from the imposed schema into the types
    // from the derived schema.
    StringBuilder sb = new StringBuilder();
    sb.append("SELECT ");
    int n = derivedSchema.size();
    for (int i = 0; i < n; ++i) {
      if (i > 0) {
        sb.append(", ");
      }
      FieldSchema fieldSchema = derivedSchema.get(i);
      // Modify a copy, not the original
      fieldSchema = new FieldSchema(fieldSchema);
      derivedSchema.set(i, fieldSchema);
      sb.append(HiveUtils.unparseIdentifier(fieldSchema.getName(), conf));
      sb.append(" AS ");
      String imposedName = imposedSchema.get(i).getName();
      sb.append(HiveUtils.unparseIdentifier(imposedName, conf));
      fieldSchema.setName(imposedName);
      // We don't currently allow imposition of a type
      fieldSchema.setComment(imposedSchema.get(i).getComment());
    }
    // wrap the expanded query so the imposed names take effect
    sb.append(" FROM (");
    sb.append(expandedText);
    sb.append(") ");
    sb.append(HiveUtils.unparseIdentifier(createVwDesc.getViewName(), conf));
    expandedText = sb.toString();
  }
  if (createVwDesc.getPartColNames() != null) {
    // Make sure all partitioning columns referenced actually
    // exist and are in the correct order at the end
    // of the list of columns produced by the view. Also move the field
    // schema descriptors from derivedSchema to the partitioning key
    // descriptor.
    List<String> partColNames = createVwDesc.getPartColNames();
    if (partColNames.size() > derivedSchema.size()) {
      throw new SemanticException(
          ErrorMsg.VIEW_PARTITION_MISMATCH.getMsg());
    }
    // Get the partition columns from the end of derivedSchema.
    // NOTE: this is a live subList view — clearing it below removes the
    // partition columns from derivedSchema itself.
    List<FieldSchema> partitionColumns = derivedSchema.subList(
        derivedSchema.size() - partColNames.size(),
        derivedSchema.size());
    // Verify that the names match the PARTITIONED ON clause.
    Iterator<String> colNameIter = partColNames.iterator();
    Iterator<FieldSchema> schemaIter = partitionColumns.iterator();
    while (colNameIter.hasNext()) {
      String colName = colNameIter.next();
      FieldSchema fieldSchema = schemaIter.next();
      if (!fieldSchema.getName().equals(colName)) {
        throw new SemanticException(
            ErrorMsg.VIEW_PARTITION_MISMATCH.getMsg());
      }
    }
    // Boundary case: require at least one non-partitioned column
    // for consistency with tables.
    if (partColNames.size() == derivedSchema.size()) {
      throw new SemanticException(
          ErrorMsg.VIEW_PARTITION_TOTAL.getMsg());
    }
    // Now make a copy.
    createVwDesc.setPartCols(
        new ArrayList<FieldSchema>(partitionColumns));
    // Finally, remove the partition columns from the end of derivedSchema.
    // (Clearing the subList writes through to the underlying
    // derivedSchema ArrayList.)
    partitionColumns.clear();
  }
  createVwDesc.setSchema(derivedSchema);
  createVwDesc.setViewExpandedText(expandedText);
}
// View schemas never qualify column names with table aliases, and column
// names must be unique.
private List<FieldSchema> convertRowSchemaToViewSchema(RowResolver rr) throws SemanticException {
  List<FieldSchema> schema = convertRowSchemaToResultSetSchema(rr, false);
  ParseUtils.validateColumnNameUniqueness(schema);
  return schema;
}
// Converts a row resolver into a result-set schema, skipping hidden virtual
// columns; optionally prefixes column names with their table alias.
private List<FieldSchema> convertRowSchemaToResultSetSchema(RowResolver rr,
    boolean useTabAliasIfAvailable) {
  List<FieldSchema> fieldSchemas = new ArrayList<FieldSchema>();
  for (ColumnInfo colInfo : rr.getColumnInfos()) {
    if (colInfo.isHiddenVirtualCol()) {
      // hidden virtual columns are not part of the visible result set
      continue;
    }
    String[] qualified = rr.reverseLookup(colInfo.getInternalName());
    String colName;
    if (useTabAliasIfAvailable && qualified[0] != null && !qualified[0].isEmpty()) {
      colName = qualified[0] + "." + qualified[1];
    } else {
      colName = qualified[1];
    }
    fieldSchemas.add(new FieldSchema(colName, colInfo.getType().getTypeName(), null));
  }
  return fieldSchemas;
}
/**
 * Generates an expression node descriptor for the expression with a default
 * TypeCheckCtx (the caller supplied no customized type-checking context).
 */
public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input)
    throws SemanticException {
  return genExprNodeDesc(expr, input, new TypeCheckCtx(input));
}
/**
 * Generates expression node descriptors for the expression and all of its
 * children using a default TypeCheckCtx.
 */
public Map<ASTNode, ExprNodeDesc> genAllExprNodeDesc(ASTNode expr, RowResolver input)
    throws SemanticException {
  return genAllExprNodeDesc(expr, input, new TypeCheckCtx(input));
}
/**
 * Returns the expression node descriptor for the expression. If it was
 * already evaluated in a previous operator (e.g. a group-by key), the cached
 * descriptor from the row resolver is returned instead of rebuilding it.
 */
public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input,
    TypeCheckCtx tcCtx) throws SemanticException {
  // If the current subExpression is pre-calculated, as in Group-By etc.,
  // serve it from the cache.
  ExprNodeDesc cached = getExprNodeDescCached(expr, input);
  if (cached != null) {
    return cached;
  }
  // Otherwise recursively create the exprNodeDesc tree. Base cases: a column
  // ref becomes an exprNodeColumnDesc and a constant becomes an
  // exprNodeConstantDesc; everything else becomes a function descriptor over
  // its recursively built children. Then pick out the root's descriptor.
  return genAllExprNodeDesc(expr, input, tcCtx).get(expr);
}
/**
 * Find ExprNodeDesc for the expression cached in the RowResolver.
 * Returns null if no cached entry exists.
 */
private ExprNodeDesc getExprNodeDescCached(ASTNode expr, RowResolver input)
    throws SemanticException {
  ColumnInfo colInfo = input.getExpression(expr);
  if (colInfo == null) {
    return null;
  }
  ASTNode source = input.getExpressionSource(expr);
  if (source != null) {
    // keep the view-expansion text in sync with the cached expression
    unparseTranslator.addCopyTranslation(expr, source);
  }
  return new ExprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
      colInfo.getTabAlias(), colInfo.getIsVirtualCol(), colInfo.isSkewedCol());
}
/**
 * Generates all of the expression node descriptors for the expression and
 * children of it passed in the arguments. This function uses the row
 * resolver and the metadata information that are passed as arguments to
 * resolve the column names to internal names.
 *
 * @param expr
 *          The expression
 * @param input
 *          The row resolver
 * @param tcCtx
 *          Customized type-checking context
 * @return expression to exprNodeDesc mapping
 * @throws SemanticException Failed to evaluate expression
 */
@SuppressWarnings("nls")
public Map<ASTNode, ExprNodeDesc> genAllExprNodeDesc(ASTNode expr, RowResolver input,
    TypeCheckCtx tcCtx) throws SemanticException {
  // Create the walker and the rules dispatcher.
  tcCtx.setUnparseTranslator(unparseTranslator);
  Map<ASTNode, ExprNodeDesc> nodeOutputs =
      TypeCheckProcFactory.genExprNode(expr, tcCtx);
  ExprNodeDesc rootDesc = nodeOutputs.get(expr);
  if (rootDesc == null) {
    String errMsg = tcCtx.getError();
    throw new SemanticException(errMsg == null ? "Error in parsing " : errMsg);
  }
  if (rootDesc instanceof ExprNodeColumnListDesc) {
    throw new SemanticException("TOK_ALLCOLREF is not supported in current context");
  }
  if (!unparseTranslator.isEnabled()) {
    // Not creating a view, so no need to track view expansions.
    return nodeOutputs;
  }
  // Register fully-qualified replacement text for every genuine column ref
  // so the view's expanded text is unambiguous.
  for (Map.Entry<ASTNode, ExprNodeDesc> entry : nodeOutputs.entrySet()) {
    if (!(entry.getValue() instanceof ExprNodeColumnDesc)) {
      continue;
    }
    ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) entry.getValue();
    String tabAlias = columnDesc.getTabAlias();
    if (tabAlias == null || tabAlias.length() == 0) {
      // These aren't real column refs; instead, they are special
      // internal expressions used in the representation of aggregation.
      continue;
    }
    String[] tmp = input.reverseLookup(columnDesc.getColumn());
    String replacementText = HiveUtils.unparseIdentifier(tmp[0], conf) + "."
        + HiveUtils.unparseIdentifier(tmp[1], conf);
    unparseTranslator.addTranslation(entry.getKey(), replacementText);
  }
  return nodeOutputs;
}
/**
 * Validates the analyzed query before execution: checks that every input and
 * output table/partition is online (not in OFFLINE protect mode), that no
 * output partition conflicts with an existing archive, and finally reworks
 * and validates every root task tree.
 *
 * @throws SemanticException if an entity is offline, an archive conflict is
 *           found, or task rework/validation fails
 */
@Override
public void validate() throws SemanticException {
  LOG.debug("validation start");
  // Validate inputs and outputs have right protectmode to execute the query
  for (ReadEntity readEntity : getInputs()) {
    ReadEntity.Type type = readEntity.getType();
    if (type != ReadEntity.Type.TABLE &&
        type != ReadEntity.Type.PARTITION) {
      // In current implementation it will never happen, but we leave it
      // here to make the logic complete.
      continue;
    }
    Table tbl = readEntity.getTable();
    Partition p = readEntity.getPartition();
    if (tbl.isOffline()) {
      throw new SemanticException(
          ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
              "Table " + tbl.getTableName()));
    }
    if (type == ReadEntity.Type.PARTITION && p != null && p.isOffline()) {
      throw new SemanticException(
          ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
              "Table " + tbl.getTableName() +
                  " Partition " + p.getName()));
    }
  }
  for (WriteEntity writeEntity : getOutputs()) {
    WriteEntity.Type type = writeEntity.getType();
    // Writing into a partition must not clash with an existing archive.
    if (type == WriteEntity.Type.PARTITION || type == WriteEntity.Type.DUMMYPARTITION) {
      String conflictingArchive;
      try {
        Partition usedp = writeEntity.getPartition();
        Table tbl = usedp.getTable();
        LOG.debug("validated " + usedp.getName());
        LOG.debug(usedp.getTable());
        conflictingArchive = ArchiveUtils
            .conflictingArchiveNameOrNull(db, tbl, usedp.getSpec());
      } catch (HiveException e) {
        throw new SemanticException(e);
      }
      if (conflictingArchive != null) {
        String message = String.format("Insert conflict with existing archive: %s",
            conflictingArchive);
        throw new SemanticException(message);
      }
    }
    if (type != WriteEntity.Type.TABLE &&
        type != WriteEntity.Type.PARTITION) {
      LOG.debug("not validating writeEntity, because entity is neither table nor partition");
      continue;
    }
    Table tbl;
    Partition p;
    if (type == WriteEntity.Type.PARTITION) {
      Partition inputPartition = writeEntity.getPartition();
      // If it is a partition, Partition's metastore is not fetched. We
      // need to fetch it.
      try {
        p = Hive.get().getPartition(
            inputPartition.getTable(), inputPartition.getSpec(), false);
        if (p != null) {
          tbl = p.getTable();
        } else {
          // if p is null, we assume that we insert to a new partition
          tbl = inputPartition.getTable();
        }
      } catch (HiveException e) {
        throw new SemanticException(e);
      }
      if (type == WriteEntity.Type.PARTITION && p != null && p.isOffline()) {
        throw new SemanticException(
            ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
                " Table " + tbl.getTableName() +
                    " Partition " + p.getName()));
      }
    }
    else {
      LOG.debug("Not a partition.");
      tbl = writeEntity.getTable();
    }
    if (tbl.isOffline()) {
      throw new SemanticException(
          ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
              "Table " + tbl.getTableName()));
    }
  }
  boolean reworkMapredWork = HiveConf.getBoolVar(this.conf,
      HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK);
  // validate all tasks
  for (Task<? extends Serializable> rootTask : rootTasks) {
    validate(rootTask, reworkMapredWork);
  }
}
// Reworks the given task's map-reduce work (when enabled) and recursively
// validates its children, depth-first.
private void validate(Task<? extends Serializable> task, boolean reworkMapredWork)
    throws SemanticException {
  Utilities.reworkMapRedWork(task, reworkMapredWork, conf);
  List<Task<? extends Serializable>> children = task.getChildTasks();
  if (children != null) {
    for (Task<? extends Serializable> child : children) {
      validate(child, reworkMapredWork);
    }
  }
}
/**
 * Get the row resolver given an operator.
 */
public RowResolver getRowResolver(Operator opt) {
  // opParseCtx maps every generated operator to its parse-time context
  return opParseCtx.get(opt).getRowResolver();
}
/**
 * Add default properties for table property. If a default parameter exists
 * in the tblProp, the value in tblProp will be kept.
 *
 * @param tblProp table property map (may be null)
 * @return the (possibly newly created) property map with defaults merged in
 */
private Map<String, String> addDefaultProperties(Map<String, String> tblProp) {
  Map<String, String> result =
      (tblProp == null) ? new HashMap<String, String>() : tblProp;
  String defaults = HiveConf.getVar(conf, ConfVars.NEWTABLEDEFAULTPARA);
  if (defaults == null || defaults.isEmpty()) {
    return result;
  }
  // defaults arrive as a comma-separated list of key=value pairs;
  // malformed entries (no '=') are skipped, user-set keys win
  for (String pair : defaults.split(",")) {
    String[] kv = pair.split("=", 2);
    if (kv.length == 2 && !result.containsKey(kv[0])) {
      result.put(kv[0], kv[1]);
    }
  }
  return result;
}
/**
 * Analyze the create table command. If it is a regular create-table or
 * create-table-like statements, we create a DDLWork and return true. If it is
 * a create-table-as-select, we get the necessary info such as the SerDe and
 * Storage Format and put it in QB, and return false, indicating the rest of
 * the semantic analyzer need to deal with the select statement with respect
 * to the SerDe and Storage Format.
 *
 * @param ast the TOK_CREATETABLE node
 * @param qb query block that receives the CreateTableDesc in the CTAS case
 * @return the CTAS SELECT subtree to keep analyzing, or null when the
 *         statement was fully handled (plain CREATE TABLE / CTLT, or
 *         IF NOT EXISTS on an existing table)
 * @throws SemanticException on invalid clause combinations or an existing
 *           target table (CTAS without IF NOT EXISTS)
 */
private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
    throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  String likeTableName = null;
  List<FieldSchema> cols = new ArrayList<FieldSchema>();
  List<FieldSchema> partCols = new ArrayList<FieldSchema>();
  List<String> bucketCols = new ArrayList<String>();
  List<Order> sortCols = new ArrayList<Order>();
  int numBuckets = -1;
  String comment = null;
  String location = null;
  Map<String, String> tblProps = null;
  boolean ifNotExists = false;
  boolean isExt = false;
  ASTNode selectStmt = null;
  final int CREATE_TABLE = 0; // regular CREATE TABLE
  final int CTLT = 1; // CREATE TABLE LIKE ... (CTLT)
  final int CTAS = 2; // CREATE TABLE AS SELECT ... (CTAS)
  int command_type = CREATE_TABLE;
  List<String> skewedColNames = new ArrayList<String>();
  List<List<String>> skewedValues = new ArrayList<List<String>>();
  Map<List<String>, String> listBucketColValuesMapping = new HashMap<List<String>, String>();
  boolean storedAsDirs = false;
  RowFormatParams rowFormatParams = new RowFormatParams();
  StorageFormat storageFormat = new StorageFormat();
  AnalyzeCreateCommonVars shared = new AnalyzeCreateCommonVars();
  LOG.info("Creating table " + tableName + " position="
      + ast.getCharPositionInLine());
  int numCh = ast.getChildCount();
  /*
   * Check the 1st-level children and do simple semantic checks: 1) CTLT and
   * CTAS should not coexists. 2) CTLT or CTAS should not coexists with column
   * list (target table schema). 3) CTAS does not support partitioning (for
   * now).
   */
  for (int num = 1; num < numCh; num++) {
    ASTNode child = (ASTNode) ast.getChild(num);
    if (storageFormat.fillStorageFormat(child, shared)) {
      // child was a storage-format clause; fully consumed
      continue;
    }
    switch (child.getToken().getType()) {
    case HiveParser.TOK_IFNOTEXISTS:
      ifNotExists = true;
      break;
    case HiveParser.KW_EXTERNAL:
      isExt = true;
      break;
    case HiveParser.TOK_LIKETABLE:
      if (child.getChildCount() > 0) {
        likeTableName = getUnescapedName((ASTNode) child.getChild(0));
        if (likeTableName != null) {
          if (command_type == CTAS) {
            throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE
                .getMsg());
          }
          if (cols.size() != 0) {
            throw new SemanticException(ErrorMsg.CTLT_COLLST_COEXISTENCE
                .getMsg());
          }
        }
        command_type = CTLT;
      }
      break;
    case HiveParser.TOK_QUERY: // CTAS
      if (command_type == CTLT) {
        throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
      }
      if (cols.size() != 0) {
        throw new SemanticException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
      }
      if (partCols.size() != 0 || bucketCols.size() != 0) {
        boolean dynPart = HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING);
        if (dynPart == false) {
          throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
        } else {
          // TODO: support dynamic partition for CTAS
          throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
        }
      }
      if (isExt) {
        throw new SemanticException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
      }
      command_type = CTAS;
      selectStmt = child;
      break;
    case HiveParser.TOK_TABCOLLIST:
      cols = getColumns(child);
      break;
    case HiveParser.TOK_TABLECOMMENT:
      comment = unescapeSQLString(child.getChild(0).getText());
      break;
    case HiveParser.TOK_TABLEPARTCOLS:
      partCols = getColumns((ASTNode) child.getChild(0), false);
      break;
    case HiveParser.TOK_TABLEBUCKETS:
      bucketCols = getColumnNames((ASTNode) child.getChild(0));
      if (child.getChildCount() == 2) {
        // CLUSTERED BY ... INTO n BUCKETS (no SORTED BY)
        numBuckets = (Integer.valueOf(child.getChild(1).getText()))
            .intValue();
      } else {
        // CLUSTERED BY ... SORTED BY ... INTO n BUCKETS
        sortCols = getColumnNamesOrder((ASTNode) child.getChild(1));
        numBuckets = (Integer.valueOf(child.getChild(2).getText()))
            .intValue();
      }
      break;
    case HiveParser.TOK_TABLEROWFORMAT:
      rowFormatParams.analyzeRowFormat(shared, child);
      break;
    case HiveParser.TOK_TABLELOCATION:
      location = unescapeSQLString(child.getChild(0).getText());
      location = EximUtil.relativeToAbsolutePath(conf, location);
      inputs.add(new ReadEntity(new Path(location), FileUtils.isLocalFile(conf, location)));
      break;
    case HiveParser.TOK_TABLEPROPERTIES:
      tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
      break;
    case HiveParser.TOK_TABLESERIALIZER:
      child = (ASTNode) child.getChild(0);
      shared.serde = unescapeSQLString(child.getChild(0).getText());
      if (child.getChildCount() == 2) {
        readProps((ASTNode) (child.getChild(1).getChild(0)),
            shared.serdeProps);
      }
      break;
    case HiveParser.TOK_FILEFORMAT_GENERIC:
      handleGenericFileFormat(child);
      break;
    case HiveParser.TOK_TABLESKEWED:
      // (removed an unused local that fetched SessionState's HiveConf; it was
      // a leftover from a list-bucketing-enable check that no longer exists)
      // skewed column names
      skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, child);
      // skewed value
      analyzeDDLSkewedValues(skewedValues, child);
      // stored as directories
      storedAsDirs = analyzeStoredAdDirs(child);
      break;
    default:
      throw new AssertionError("Unknown token: " + child.getToken());
    }
  }
  storageFormat.fillDefaultStorageFormat(shared);
  if ((command_type == CTAS) && (storageFormat.storageHandler != null)) {
    throw new SemanticException(ErrorMsg.CREATE_NON_NATIVE_AS.getMsg());
  }
  // check for existence of table
  if (ifNotExists) {
    try {
      Table table = getTableWithQN(tableName, false);
      if (table != null) { // table exists
        return null;
      }
    } catch (HiveException e) {
      e.printStackTrace();
    }
  }
  String[] qualified = Hive.getQualifiedNames(tableName);
  String dbName = qualified.length == 1 ? SessionState.get().getCurrentDatabase() : qualified[0];
  Database database = getDatabase(dbName);
  outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
  // Handle different types of CREATE TABLE command
  CreateTableDesc crtTblDesc = null;
  switch (command_type) {
  case CREATE_TABLE: // REGULAR CREATE TABLE DDL
    tblProps = addDefaultProperties(tblProps);
    crtTblDesc = new CreateTableDesc(tableName, isExt, cols, partCols,
        bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
        rowFormatParams.fieldEscape,
        rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
        comment,
        storageFormat.inputFormat, storageFormat.outputFormat, location, shared.serde,
        storageFormat.storageHandler, shared.serdeProps, tblProps, ifNotExists, skewedColNames,
        skewedValues);
    crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
    crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
    crtTblDesc.validate(conf);
    // outputs is empty, which means this create table happens in the current
    // database.
    SessionState.get().setCommandType(HiveOperation.CREATETABLE);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        crtTblDesc), conf));
    break;
  case CTLT: // create table like <tbl_name>
    tblProps = addDefaultProperties(tblProps);
    CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt,
        storageFormat.inputFormat, storageFormat.outputFormat, location,
        shared.serde, shared.serdeProps, tblProps, ifNotExists, likeTableName);
    SessionState.get().setCommandType(HiveOperation.CREATETABLE);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        crtTblLikeDesc), conf));
    break;
  case CTAS: // create table as select
    // Verify that the table does not already exist
    try {
      Table dumpTable = db.newTable(tableName);
      if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false)) {
        throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
      }
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
    tblProps = addDefaultProperties(tblProps);
    crtTblDesc = new CreateTableDesc(dbName, tableName, isExt, cols, partCols,
        bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
        rowFormatParams.fieldEscape,
        rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
        comment, storageFormat.inputFormat,
        storageFormat.outputFormat, location, shared.serde, storageFormat.storageHandler,
        shared.serdeProps,
        tblProps, ifNotExists, skewedColNames, skewedValues);
    crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
    crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
    // the rest of the analyzer plans the SELECT against this descriptor
    qb.setTableDesc(crtTblDesc);
    SessionState.get().setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
    return selectStmt;
  default:
    throw new SemanticException("Unrecognized command.");
  }
  return null;
}
private ASTNode analyzeCreateView(ASTNode ast, QB qb)
throws SemanticException {
String tableName = getUnescapedName((ASTNode) ast.getChild(0));
List<FieldSchema> cols = null;
boolean ifNotExists = false;
boolean orReplace = false;
boolean isAlterViewAs = false;
String comment = null;
ASTNode selectStmt = null;
Map<String, String> tblProps = null;
List<String> partColNames = null;
LOG.info("Creating view " + tableName + " position="
+ ast.getCharPositionInLine());
int numCh = ast.getChildCount();
for (int num = 1; num < numCh; num++) {
ASTNode child = (ASTNode) ast.getChild(num);
switch (child.getToken().getType()) {
case HiveParser.TOK_IFNOTEXISTS:
ifNotExists = true;
break;
case HiveParser.TOK_ORREPLACE:
orReplace = true;
break;
case HiveParser.TOK_QUERY:
selectStmt = child;
break;
case HiveParser.TOK_TABCOLNAME:
cols = getColumns(child);
break;
case HiveParser.TOK_TABLECOMMENT:
comment = unescapeSQLString(child.getChild(0).getText());
break;
case HiveParser.TOK_TABLEPROPERTIES:
tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
break;
case HiveParser.TOK_VIEWPARTCOLS:
partColNames = getColumnNames((ASTNode) child.getChild(0));
break;
default:
assert false;
}
}
if (ifNotExists && orReplace){
throw new SemanticException("Can't combine IF NOT EXISTS and OR REPLACE.");
}
if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
isAlterViewAs = true;
orReplace = true;
}
createVwDesc = new CreateViewDesc(
tableName, cols, comment, tblProps, partColNames,
ifNotExists, orReplace, isAlterViewAs);
unparseTranslator.enable();
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
createVwDesc), conf));
return selectStmt;
}
// validate the create view statement
// the statement could be CREATE VIEW, REPLACE VIEW, or ALTER VIEW AS SELECT
// check semantic conditions
private void validateCreateView(CreateViewDesc createVwDesc)
throws SemanticException {
try {
Table oldView = getTableWithQN(createVwDesc.getViewName(), false);
// ALTER VIEW AS SELECT requires the view must exist
if (createVwDesc.getIsAlterViewAs() && oldView == null) {
String viewNotExistErrorMsg =
"The following view does not exist: " + createVwDesc.getViewName();
throw new SemanticException(
ErrorMsg.ALTER_VIEW_AS_SELECT_NOT_EXIST.getMsg(viewNotExistErrorMsg));
}
//replace view
if (createVwDesc.getOrReplace() && oldView != null) {
// Existing table is not a view
if (!oldView.getTableType().equals(TableType.VIRTUAL_VIEW)) {
String tableNotViewErrorMsg =
"The following is an existing table, not a view: " +
createVwDesc.getViewName();
throw new SemanticException(
ErrorMsg.EXISTING_TABLE_IS_NOT_VIEW.getMsg(tableNotViewErrorMsg));
}
// if old view has partitions, it could not be replaced
String partitionViewErrorMsg =
"The following view has partition, it could not be replaced: " +
createVwDesc.getViewName();
try {
if ((createVwDesc.getPartCols() == null ||
createVwDesc.getPartCols().isEmpty() ||
!createVwDesc.getPartCols().equals(oldView.getPartCols())) &&
!oldView.getPartCols().isEmpty() &&
!db.getPartitions(oldView).isEmpty()) {
throw new SemanticException(
ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg));
}
} catch (HiveException e) {
throw new SemanticException(
ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg));
}
}
} catch (HiveException e) {
throw new SemanticException(e.getMessage());
}
}
// Process the position alias in GROUPBY and ORDERBY
private void processPositionAlias(ASTNode ast) throws SemanticException {
if (HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS) == false) {
return;
}
if (ast.getChildCount() == 0) {
return;
}
boolean isAllCol;
ASTNode selectNode = null;
ASTNode groupbyNode = null;
ASTNode orderbyNode = null;
// get node type
int child_count = ast.getChildCount();
for (int child_pos = 0; child_pos < child_count; ++child_pos) {
ASTNode node = (ASTNode) ast.getChild(child_pos);
int type = node.getToken().getType();
if (type == HiveParser.TOK_SELECT) {
selectNode = node;
} else if (type == HiveParser.TOK_GROUPBY) {
groupbyNode = node;
} else if (type == HiveParser.TOK_ORDERBY) {
orderbyNode = node;
}
}
if (selectNode != null) {
int selectExpCnt = selectNode.getChildCount();
// replace each of the position alias in GROUPBY with the actual column name
if (groupbyNode != null) {
for (int child_pos = 0; child_pos < groupbyNode.getChildCount(); ++child_pos) {
ASTNode node = (ASTNode) groupbyNode.getChild(child_pos);
if (node.getToken().getType() == HiveParser.Number) {
int pos = Integer.parseInt(node.getText());
if (pos > 0 && pos <= selectExpCnt) {
groupbyNode.setChild(child_pos,
selectNode.getChild(pos - 1).getChild(0));
} else {
throw new SemanticException(
ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY.getMsg(
"Position alias: " + pos + " does not exist\n" +
"The Select List is indexed from 1 to " + selectExpCnt));
}
}
}
}
// replace each of the position alias in ORDERBY with the actual column name
if (orderbyNode != null) {
isAllCol = false;
for (int child_pos = 0; child_pos < selectNode.getChildCount(); ++child_pos) {
ASTNode node = (ASTNode) selectNode.getChild(child_pos).getChild(0);
if (node.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
isAllCol = true;
}
}
for (int child_pos = 0; child_pos < orderbyNode.getChildCount(); ++child_pos) {
ASTNode colNode = (ASTNode) orderbyNode.getChild(child_pos);
ASTNode node = (ASTNode) colNode.getChild(0);
if (node.getToken().getType() == HiveParser.Number) {
if (!isAllCol) {
int pos = Integer.parseInt(node.getText());
if (pos > 0 && pos <= selectExpCnt) {
colNode.setChild(0, selectNode.getChild(pos - 1).getChild(0));
} else {
throw new SemanticException(
ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg(
"Position alias: " + pos + " does not exist\n" +
"The Select List is indexed from 1 to " + selectExpCnt));
}
} else {
throw new SemanticException(
ErrorMsg.NO_SUPPORTED_ORDERBY_ALLCOLREF_POS.getMsg());
}
}
}
}
}
// Recursively process through the children ASTNodes
for (int child_pos = 0; child_pos < child_count; ++child_pos) {
processPositionAlias((ASTNode) ast.getChild(child_pos));
}
return;
}
/**
* process analyze ... partial command
*
* separate it from noscan command process so that it provides us flexibility
*
* @param tree
* @throws SemanticException
*/
protected void processPartialScanCommand (ASTNode tree) throws SemanticException {
// check if it is partial scan command
this.checkPartialScan(tree);
//validate partial scan
if (this.partialscan) {
validateAnalyzePartialscan(tree);
}
}
/**
* process analyze ... noscan command
* @param tree
* @throws SemanticException
*/
protected void processNoScanCommand (ASTNode tree) throws SemanticException {
// check if it is noscan command
checkNoScan(tree);
//validate noscan
if (this.noscan) {
validateAnalyzeNoscan(tree);
}
}
/**
* Validate noscan command
*
* @param tree
* @throws SemanticException
*/
private void validateAnalyzeNoscan(ASTNode tree) throws SemanticException {
// since it is noscan, it is true table name in command
String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
Table tbl;
try {
tbl = db.getTable(tableName);
} catch (HiveException e) {
throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
}
/* noscan uses hdfs apis to retrieve such information from Namenode. */
/* But that will be specific to hdfs. Through storagehandler mechanism, */
/* storage of table could be on any storage system: hbase, cassandra etc. */
/* A nice error message should be given to user. */
if (tbl.isNonNative()) {
throw new SemanticException(ErrorMsg.ANALYZE_TABLE_NOSCAN_NON_NATIVE.getMsg(tbl
.getTableName()));
}
}
/**
* Validate partialscan command
*
* @param tree
* @throws SemanticException
*/
private void validateAnalyzePartialscan(ASTNode tree) throws SemanticException {
// since it is partialscan, it is true table name in command
String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
Table tbl;
try {
tbl = db.getTable(tableName);
} catch (HiveException e) {
throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
}
/* partialscan uses hdfs apis to retrieve such information from Namenode. */
/* But that will be specific to hdfs. Through storagehandler mechanism, */
/* storage of table could be on any storage system: hbase, cassandra etc. */
/* A nice error message should be given to user. */
if (tbl.isNonNative()) {
throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_NON_NATIVE.getMsg(tbl
.getTableName()));
}
/**
* Partial scan doesn't support external table.
*/
if(tbl.getTableType().equals(TableType.EXTERNAL_TABLE)) {
throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_EXTERNAL_TABLE.getMsg(tbl
.getTableName()));
}
if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_AUTOGATHER.getMsg());
}
}
/**
* It will check if this is analyze ... compute statistics noscan
* @param tree
*/
private void checkNoScan(ASTNode tree) {
if (tree.getChildCount() > 1) {
ASTNode child0 = (ASTNode) tree.getChild(0);
ASTNode child1;
if (child0.getToken().getType() == HiveParser.TOK_TAB) {
child0 = (ASTNode) child0.getChild(0);
if (child0.getToken().getType() == HiveParser.TOK_TABNAME) {
child1 = (ASTNode) tree.getChild(1);
if (child1.getToken().getType() == HiveParser.KW_NOSCAN) {
this.noscan = true;
}
}
}
}
}
/**
* It will check if this is analyze ... compute statistics partialscan
* @param tree
*/
private void checkPartialScan(ASTNode tree) {
if (tree.getChildCount() > 1) {
ASTNode child0 = (ASTNode) tree.getChild(0);
ASTNode child1;
if (child0.getToken().getType() == HiveParser.TOK_TAB) {
child0 = (ASTNode) child0.getChild(0);
if (child0.getToken().getType() == HiveParser.TOK_TABNAME) {
child1 = (ASTNode) tree.getChild(1);
if (child1.getToken().getType() == HiveParser.KW_PARTIALSCAN) {
this.partialscan = true;
}
}
}
}
}
  /** Returns the query block (QB) currently held by this analyzer. */
  public QB getQB() {
    return qb;
  }
  /**
   * Sets the query block (QB) for this analyzer.
   *
   * @param qb the query block to associate with this analyzer
   */
  public void setQB(QB qb) {
    this.qb = qb;
  }
//--------------------------- PTF handling -----------------------------------
/*
* - a partitionTableFunctionSource can be a tableReference, a SubQuery or another
* PTF invocation.
* - For a TABLEREF: set the source to the alias returned by processTable
* - For a SubQuery: set the source to the alias returned by processSubQuery
* - For a PTF invocation: recursively call processPTFChain.
*/
private PTFInputSpec processPTFSource(QB qb, ASTNode inputNode) throws SemanticException{
PTFInputSpec qInSpec = null;
int type = inputNode.getType();
String alias;
switch(type)
{
case HiveParser.TOK_TABREF:
alias = processTable(qb, inputNode);
qInSpec = new PTFQueryInputSpec();
((PTFQueryInputSpec)qInSpec).setType(PTFQueryInputType.TABLE);
((PTFQueryInputSpec)qInSpec).setSource(alias);
break;
case HiveParser.TOK_SUBQUERY:
alias = processSubQuery(qb, inputNode);
qInSpec = new PTFQueryInputSpec();
((PTFQueryInputSpec)qInSpec).setType(PTFQueryInputType.SUBQUERY);
((PTFQueryInputSpec)qInSpec).setSource(alias);
break;
case HiveParser.TOK_PTBLFUNCTION:
qInSpec = processPTFChain(qb, inputNode);
break;
default:
throw new SemanticException(generateErrorMessage(inputNode,
"Unknown input type to PTF"));
}
qInSpec.setAstNode(inputNode);
return qInSpec;
}
/*
* - tree form is
* ^(TOK_PTBLFUNCTION name alias? partitionTableFunctionSource partitioningSpec? arguments*)
* - a partitionTableFunctionSource can be a tableReference, a SubQuery or another
* PTF invocation.
*/
private PartitionedTableFunctionSpec processPTFChain(QB qb, ASTNode ptf)
throws SemanticException{
int child_count = ptf.getChildCount();
if (child_count < 2) {
throw new SemanticException(generateErrorMessage(ptf,
"Not enough Children " + child_count));
}
PartitionedTableFunctionSpec ptfSpec = new PartitionedTableFunctionSpec();
ptfSpec.setAstNode(ptf);
/*
* name
*/
ASTNode nameNode = (ASTNode) ptf.getChild(0);
ptfSpec.setName(nameNode.getText());
int inputIdx = 1;
/*
* alias
*/
ASTNode secondChild = (ASTNode) ptf.getChild(1);
if ( secondChild.getType() == HiveParser.Identifier ) {
ptfSpec.setAlias(secondChild.getText());
inputIdx++;
}
/*
* input
*/
ASTNode inputNode = (ASTNode) ptf.getChild(inputIdx);
ptfSpec.setInput(processPTFSource(qb, inputNode));
int argStartIdx = inputIdx + 1;
/*
* partitioning Spec
*/
int pSpecIdx = inputIdx + 1;
ASTNode pSpecNode = ptf.getChildCount() > inputIdx ?
(ASTNode) ptf.getChild(pSpecIdx) : null;
if (pSpecNode != null && pSpecNode.getType() == HiveParser.TOK_PARTITIONINGSPEC)
{
PartitioningSpec partitioning = processPTFPartitionSpec(pSpecNode);
ptfSpec.setPartitioning(partitioning);
argStartIdx++;
}
/*
* arguments
*/
for(int i=argStartIdx; i < ptf.getChildCount(); i++)
{
ptfSpec.addArg((ASTNode) ptf.getChild(i));
}
return ptfSpec;
}
/*
* - invoked during FROM AST tree processing, on encountering a PTF invocation.
* - tree form is
* ^(TOK_PTBLFUNCTION name partitionTableFunctionSource partitioningSpec? arguments*)
* - setup a PTFInvocationSpec for this top level PTF invocation.
*/
private void processPTF(QB qb, ASTNode ptf) throws SemanticException{
PartitionedTableFunctionSpec ptfSpec = processPTFChain(qb, ptf);
if ( ptfSpec.getAlias() != null ) {
qb.addAlias(ptfSpec.getAlias());
}
PTFInvocationSpec spec = new PTFInvocationSpec();
spec.setFunction(ptfSpec);
qb.addPTFNodeToSpec(ptf, spec);
}
private void handleQueryWindowClauses(QB qb, Phase1Ctx ctx_1, ASTNode node)
throws SemanticException {
WindowingSpec spec = qb.getWindowingSpec(ctx_1.dest);
for(int i=0; i < node.getChildCount(); i++) {
processQueryWindowClause(spec, (ASTNode) node.getChild(i));
}
}
private PartitionSpec processPartitionSpec(ASTNode node) {
PartitionSpec pSpec = new PartitionSpec();
int exprCnt = node.getChildCount();
for(int i=0; i < exprCnt; i++) {
PartitionExpression exprSpec = new PartitionExpression();
exprSpec.setExpression((ASTNode) node.getChild(i));
pSpec.addExpression(exprSpec);
}
return pSpec;
}
private OrderSpec processOrderSpec(ASTNode sortNode) {
OrderSpec oSpec = new OrderSpec();
int exprCnt = sortNode.getChildCount();
for(int i=0; i < exprCnt; i++) {
OrderExpression exprSpec = new OrderExpression();
exprSpec.setExpression((ASTNode) sortNode.getChild(i).getChild(0));
if ( sortNode.getChild(i).getType() == HiveParser.TOK_TABSORTCOLNAMEASC ) {
exprSpec.setOrder(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order.ASC);
}
else {
exprSpec.setOrder(org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order.DESC);
}
oSpec.addExpression(exprSpec);
}
return oSpec;
}
private PartitioningSpec processPTFPartitionSpec(ASTNode pSpecNode)
{
PartitioningSpec partitioning = new PartitioningSpec();
ASTNode firstChild = (ASTNode) pSpecNode.getChild(0);
int type = firstChild.getType();
int exprCnt;
if ( type == HiveParser.TOK_DISTRIBUTEBY || type == HiveParser.TOK_CLUSTERBY )
{
PartitionSpec pSpec = processPartitionSpec(firstChild);
partitioning.setPartSpec(pSpec);
ASTNode sortNode = pSpecNode.getChildCount() > 1 ? (ASTNode) pSpecNode.getChild(1) : null;
if ( sortNode != null )
{
OrderSpec oSpec = processOrderSpec(sortNode);
partitioning.setOrderSpec(oSpec);
}
}
else if ( type == HiveParser.TOK_SORTBY || type == HiveParser.TOK_ORDERBY ) {
ASTNode sortNode = firstChild;
OrderSpec oSpec = processOrderSpec(sortNode);
partitioning.setOrderSpec(oSpec);
}
return partitioning;
}
private WindowFunctionSpec processWindowFunction(ASTNode node, ASTNode wsNode)
throws SemanticException {
WindowFunctionSpec wfSpec = new WindowFunctionSpec();
switch(node.getType()) {
case HiveParser.TOK_FUNCTIONSTAR:
wfSpec.setStar(true);
break;
case HiveParser.TOK_FUNCTIONDI:
wfSpec.setDistinct(true);
break;
}
if ( wfSpec.isDistinct() ) {
throw new SemanticException(generateErrorMessage(node,
"Count/Sum distinct not supported with Windowing"));
}
wfSpec.setExpression(node);
ASTNode nameNode = (ASTNode) node.getChild(0);
wfSpec.setName(nameNode.getText());
for(int i=1; i < node.getChildCount()-1; i++) {
ASTNode child = (ASTNode) node.getChild(i);
wfSpec.addArg(child);
}
if ( wsNode != null ) {
WindowSpec ws = processWindowSpec(wsNode);
wfSpec.setWindowSpec(ws);
}
return wfSpec;
}
private boolean containsLeadLagUDF(ASTNode expressionTree) {
int exprTokenType = expressionTree.getToken().getType();
if (exprTokenType == HiveParser.TOK_FUNCTION) {
assert (expressionTree.getChildCount() != 0);
if (expressionTree.getChild(0).getType() == HiveParser.Identifier) {
String functionName = unescapeIdentifier(expressionTree.getChild(0)
.getText());
functionName = functionName.toLowerCase();
if ( FunctionRegistry.LAG_FUNC_NAME.equals(functionName) ||
FunctionRegistry.LEAD_FUNC_NAME.equals(functionName)
) {
return true;
}
}
}
for (int i = 0; i < expressionTree.getChildCount(); i++) {
if ( containsLeadLagUDF((ASTNode) expressionTree.getChild(i))) {
return true;
}
}
return false;
}
private void processQueryWindowClause(WindowingSpec spec, ASTNode node)
throws SemanticException {
ASTNode nameNode = (ASTNode) node.getChild(0);
ASTNode wsNode = (ASTNode) node.getChild(1);
if(spec.getWindowSpecs() != null && spec.getWindowSpecs().containsKey(nameNode.getText())){
throw new SemanticException(generateErrorMessage(nameNode,
"Duplicate definition of window " + nameNode.getText() +
" is not allowed"));
}
WindowSpec ws = processWindowSpec(wsNode);
spec.addWindowSpec(nameNode.getText(), ws);
}
private WindowSpec processWindowSpec(ASTNode node) throws SemanticException {
String sourceId = null;
PartitionSpec partition = null;
OrderSpec order = null;
WindowFrameSpec windowFrame = null;
boolean hasSrcId = false, hasPartSpec = false, hasWF = false;
int srcIdIdx = -1, partIdx = -1, wfIdx = -1;
for(int i=0; i < node.getChildCount(); i++)
{
int type = node.getChild(i).getType();
switch(type)
{
case HiveParser.Identifier:
hasSrcId = true; srcIdIdx = i;
break;
case HiveParser.TOK_PARTITIONINGSPEC:
hasPartSpec = true; partIdx = i;
break;
case HiveParser.TOK_WINDOWRANGE:
case HiveParser.TOK_WINDOWVALUES:
hasWF = true; wfIdx = i;
break;
}
}
WindowSpec ws = new WindowSpec();
if (hasSrcId) {
ASTNode nameNode = (ASTNode) node.getChild(srcIdIdx);
ws.setSourceId(nameNode.getText());
}
if (hasPartSpec) {
ASTNode partNode = (ASTNode) node.getChild(partIdx);
PartitioningSpec partitioning = processPTFPartitionSpec(partNode);
ws.setPartitioning(partitioning);
}
if ( hasWF)
{
ASTNode wfNode = (ASTNode) node.getChild(wfIdx);
WindowFrameSpec wfSpec = processWindowFrame(wfNode);
ws.setWindowFrame(wfSpec);
}
return ws;
}
private WindowFrameSpec processWindowFrame(ASTNode node) throws SemanticException {
int type = node.getType();
BoundarySpec start = null, end = null;
/*
* A WindowFrame may contain just the Start Boundary or in the
* between style of expressing a WindowFrame both boundaries
* are specified.
*/
start = processBoundary(type, (ASTNode) node.getChild(0));
if ( node.getChildCount() > 1 ) {
end = processBoundary(type, (ASTNode) node.getChild(1));
}
return new WindowFrameSpec(start, end);
}
private BoundarySpec processBoundary(int frameType, ASTNode node) throws SemanticException {
BoundarySpec bs = frameType == HiveParser.TOK_WINDOWRANGE ?
new RangeBoundarySpec() : new ValueBoundarySpec();
int type = node.getType();
boolean hasAmt = true;
switch(type)
{
case HiveParser.KW_PRECEDING:
bs.setDirection(Direction.PRECEDING);
break;
case HiveParser.KW_FOLLOWING:
bs.setDirection(Direction.FOLLOWING);
break;
case HiveParser.KW_CURRENT:
bs = new CurrentRowSpec();
hasAmt = false;
break;
}
if ( hasAmt )
{
ASTNode amtNode = (ASTNode) node.getChild(0);
if ( amtNode.getType() == HiveParser.KW_UNBOUNDED)
{
bs.setAmt(BoundarySpec.UNBOUNDED_AMOUNT);
}
else
{
int amt = Integer.parseInt(amtNode.getText());
if ( amt < 0 ) {
throw new SemanticException(
"Window Frame Boundary Amount must be a +ve integer, amount provide is: " + amt);
}
bs.setAmt(amt);
}
}
return bs;
}
/*
* check if a Select Expr is a constant.
* - current logic used is to look for HiveParser.TOK_TABLE_OR_COL
* - if there is none then the expression is a constant.
*/
private static class ConstantExprCheck implements ContextVisitor {
boolean isConstant = true;
@Override
public void visit(Object t, Object parent, int childIndex, Map labels) {
if ( !isConstant ) {
return;
}
ASTNode node = (ASTNode) t;
if (ParseDriver.adaptor.getType(t) == HiveParser.TOK_TABLE_OR_COL ) {
isConstant = false;
}
}
public void reset() {
isConstant = true;
}
protected boolean isConstant() {
return isConstant;
}
}
private static class AggregationExprCheck implements ContextVisitor {
HashMap<String, ASTNode> destAggrExprs;
boolean isAggr = false;
public AggregationExprCheck(HashMap<String, ASTNode> destAggrExprs) {
super();
this.destAggrExprs = destAggrExprs;
}
@Override
public void visit(Object t, Object parent, int childIndex, Map labels) {
if ( isAggr ) {
return;
}
if ( destAggrExprs.values().contains(t)) {
isAggr = true;
}
}
public void reset() {
isAggr = false;
}
protected boolean isAggr() {
return isAggr;
}
}
/*
* Returns false if there is a SelectExpr that is not a constant or an aggr.
*
*/
private boolean isValidGroupBySelectList(QB currQB, String clause){
ConstantExprCheck constantExprCheck = new ConstantExprCheck();
AggregationExprCheck aggrExprCheck = new AggregationExprCheck(
currQB.getParseInfo().getAggregationExprsForClause(clause));
TreeWizard tw = new TreeWizard(ParseDriver.adaptor, HiveParser.tokenNames);
ASTNode selectNode = currQB.getParseInfo().getSelForClause(clause);
/*
* for Select Distinct Queries we don't move any aggregations.
*/
if ( selectNode != null && selectNode.getType() == HiveParser.TOK_SELECTDI ) {
return true;
}
for (int i = 0; selectNode != null && i < selectNode.getChildCount(); i++) {
ASTNode selectExpr = (ASTNode) selectNode.getChild(i);
//check for TOK_HINTLIST expressions on ast
if(selectExpr.getType() != HiveParser.TOK_SELEXPR){
continue;
}
constantExprCheck.reset();
PTFTranslator.visit(selectExpr.getChild(0), constantExprCheck);
if ( !constantExprCheck.isConstant() ) {
aggrExprCheck.reset();
PTFTranslator.visit(selectExpr.getChild(0), aggrExprCheck);
if (!aggrExprCheck.isAggr() ) {
return false;
}
}
}
return true;
}
//--------------------------- PTF handling: PTFInvocationSpec to PTFDesc --------------------------
private PTFDesc translatePTFInvocationSpec(PTFInvocationSpec ptfQSpec, RowResolver inputRR)
throws SemanticException{
PTFDesc ptfDesc = null;
PTFTranslator translator = new PTFTranslator();
ptfDesc = translator.translate(ptfQSpec, this, conf, inputRR, unparseTranslator);
return ptfDesc;
}
Operator genPTFPlan(PTFInvocationSpec ptfQSpec, Operator input) throws SemanticException {
ArrayList<PTFInvocationSpec> componentQueries = PTFTranslator.componentize(ptfQSpec);
for (PTFInvocationSpec ptfSpec : componentQueries) {
input = genPTFPlanForComponentQuery(ptfSpec, input);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Created PTF Plan ");
}
return input;
}
  /**
   * Construct the data structures containing ExprNodeDesc for partition
   * columns and order columns. Use the input definition to construct the list
   * of output columns for the ReduceSinkOperator
   *
   * @param tabDef PTF definition supplying the partition and order expressions
   * @param inputRR row resolver of the operator feeding the ReduceSink
   * @param partCols out: partition key expressions
   * @param valueCols out: value expressions, one per input column
   * @param orderCols out: key/order expressions (partition columns first)
   * @param colExprMap out: internal column name -> value expression
   * @param outputColumnNames out: internal names of the value columns
   * @param orderString out: one '+'/'-' per order column (ASC/DESC)
   * @param rsOpRR out: row resolver for the ReduceSink operator
   * @param extractRR out: row resolver for the downstream Extract operator
   * @throws SemanticException
   */
  void buildPTFReduceSinkDetails(PartitionedTableFunctionDef tabDef,
      RowResolver inputRR,
      ArrayList<ExprNodeDesc> partCols,
      ArrayList<ExprNodeDesc> valueCols,
      ArrayList<ExprNodeDesc> orderCols,
      Map<String, ExprNodeDesc> colExprMap,
      List<String> outputColumnNames,
      StringBuilder orderString,
      RowResolver rsOpRR,
      RowResolver extractRR) throws SemanticException {
    // Partition columns double as leading order columns, always ascending.
    List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
    for (PTFExpressionDef colDef : partColList) {
      partCols.add(colDef.getExprNode());
      orderCols.add(colDef.getExprNode());
      orderString.append('+');
    }
    /*
     * Order columns are used as key columns for constructing
     * the ReduceSinkOperator
     * Since we do not explicitly add these to outputColumnNames,
     * we need to set includeKeyCols = false while creating the
     * ReduceSinkDesc
     */
    List<OrderExpressionDef> orderColList = tabDef.getOrder().getExpressions();
    for (int i = 0; i < orderColList.size(); i++) {
      OrderExpressionDef colDef = orderColList.get(i);
      org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order order = colDef.getOrder();
      if (order.name().equals("ASC")) {
        orderString.append('+');
      } else {
        orderString.append('-');
      }
      orderCols.add(colDef.getExprNode());
    }
    ArrayList<ColumnInfo> colInfoList = inputRR.getColumnInfos();
    /*
     * construct the ReduceSinkRR
     */
    int pos = 0;
    for (ColumnInfo colInfo : colInfoList) {
      ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo.getType(), colInfo
          .getInternalName(), colInfo.getTabAlias(), colInfo
          .getIsVirtualCol());
      valueCols.add(valueColExpr);
      String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
      outputColumnNames.add(internalName);
      colExprMap.put(internalName, valueColExpr);
      String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
      ColumnInfo newColInfo = new ColumnInfo(
          internalName, colInfo.getType(), alias[0],
          colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
      rsOpRR.put(alias[0], alias[1], newColInfo);
    }
    /*
     * construct the ExtractRR
     */
    // Columns that appear again because of the HAVING clause are collected
    // here and added to extractRR at the end, after the expression-based
    // entries.
    // NOTE(review): the map is keyed by String[], which uses identity
    // equality — containsKey below only hits if reverseLookup returns the
    // same array instance; verify this is the intended behavior.
    LinkedHashMap<String[], ColumnInfo> colsAddedByHaving =
        new LinkedHashMap<String[], ColumnInfo>();
    pos = 0;
    for (ColumnInfo colInfo : colInfoList) {
      String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
      /*
       * if we have already encountered this colInfo internalName.
       * We encounter it again because it must be put for the Having clause.
       * We will add these entries in the end; in a loop on colsAddedByHaving. See below.
       */
      if ( colsAddedByHaving.containsKey(alias)) {
        continue;
      }
      ASTNode astNode = PTFTranslator.getASTNode(colInfo, inputRR);
      ColumnInfo eColInfo = new ColumnInfo(
          SemanticAnalyzer.getColumnInternalName(pos++), colInfo.getType(), alias[0],
          colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
      if ( astNode == null ) {
        extractRR.put(alias[0], alias[1], eColInfo);
      }
      else {
        /*
         * in case having clause refers to this column may have been added twice;
         * once with the ASTNode.toStringTree as the alias
         * and then with the real alias.
         */
        extractRR.putExpression(astNode, eColInfo);
        if ( !astNode.toStringTree().toLowerCase().equals(alias[1]) ) {
          colsAddedByHaving.put(alias, eColInfo);
        }
      }
    }
    for(Map.Entry<String[], ColumnInfo> columnAddedByHaving : colsAddedByHaving.entrySet() ) {
      String[] alias = columnAddedByHaving.getKey();
      ColumnInfo eColInfo = columnAddedByHaving.getValue();
      extractRR.put(alias[0], alias[1], eColInfo);
    }
  }
  /**
   * Generates the operator plan for a single componentized PTF invocation:
   * translates the spec into a PTFDesc, optionally adds a map-side PTF
   * operator, then a ReduceSink, an Extract operator, and finally the
   * reduce-side PTF operator. Returns the last operator appended.
   */
  private Operator genPTFPlanForComponentQuery(PTFInvocationSpec ptfQSpec, Operator input)
    throws SemanticException {
    /*
     * 1. Create the PTFDesc from the Qspec attached to this QB.
     */
    RowResolver rr = opParseCtx.get(input).getRowResolver();
    PTFDesc ptfDesc = translatePTFInvocationSpec(ptfQSpec, rr);
    RowResolver rsOpRR = new RowResolver();
    /*
     * Build an RR for the Extract Op from the ReduceSink Op's RR.
     * Why?
     * We need to remove the Virtual Columns present in the RS's RR. The OI
     * that gets passed to Extract at runtime doesn't contain the Virtual Columns.
     * So internal names get changed. Consider testCase testJoinWithLeadLag,
     * which is a self join on part and also has a Windowing expression.
     * The RR of the RS op at translation time looks something like this:
     * (_co1,_col2,..,_col7, _col8(vc=true),_col9(vc=true),
     * _col10,_col11,.._col15(vc=true),_col16(vc=true),..)
     * At runtime the Virtual columns are removed and all the columns after _col7
     * are shifted 1 or 2 positions.
     * So in child Operators ColumnExprNodeDesc's are no longer referring to the right columns.
     *
     * So we build a new RR for the Extract Op, with the Virtual Columns removed.
     * We hand this to the PTFTranslator as the
     * starting RR to use to translate a PTF Chain.
     */
    RowResolver extractOpRR = new RowResolver();
    /*
     * 2. build Map-side Op Graph. Graph template is either:
     * Input -> PTF_map -> ReduceSink
     * or
     * Input -> ReduceSink
     *
     * Here the ExprNodeDescriptors in the QueryDef are based on the Input Operator's RR.
     */
    {
      PartitionedTableFunctionDef tabDef = ptfDesc.getStartOfChain();
      /*
       * a. add Map-side PTF Operator if needed
       */
      if (tabDef.isTransformsRawInput() )
      {
        RowResolver ptfMapRR = tabDef.getRawInputShape().getRr();
        ptfDesc.setMapSide(true);
        input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
            new RowSchema(ptfMapRR.getColumnInfos()),
            input), ptfMapRR);
        // Refresh rr: the ReduceSink must be keyed off the PTF map output.
        rr = opParseCtx.get(input).getRowResolver();
      }
      /*
       * b. Build Reduce Sink Details (keyCols, valueCols, outColNames etc.) for this ptfDesc.
       */
      ArrayList<ExprNodeDesc> partCols = new ArrayList<ExprNodeDesc>();
      ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
      ArrayList<ExprNodeDesc> orderCols = new ArrayList<ExprNodeDesc>();
      Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
      List<String> outputColumnNames = new ArrayList<String>();
      StringBuilder orderString = new StringBuilder();
      /*
       * Use the input RR of TableScanOperator in case there is no map-side
       * reshape of input.
       * If the parent of ReduceSinkOperator is PTFOperator, use it's
       * output RR.
       */
      buildPTFReduceSinkDetails(tabDef,
          rr,
          partCols,
          valueCols,
          orderCols,
          colExprMap,
          outputColumnNames,
          orderString,
          rsOpRR,
          extractOpRR);
      input = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
          .getReduceSinkDesc(orderCols,
              valueCols, outputColumnNames, false,
              -1, partCols, orderString.toString(), -1),
          new RowSchema(rsOpRR.getColumnInfos()), input), rsOpRR);
      input.setColumnExprMap(colExprMap);
    }
    /*
     * 3. build Reduce-side Op Graph
     */
    {
      /*
       * b. Construct Extract Operator.
       */
      input = putOpInsertMap(OperatorFactory.getAndMakeChild(
          new ExtractDesc(
              new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
                  Utilities.ReduceField.VALUE
                  .toString(), "", false)),
          new RowSchema(extractOpRR.getColumnInfos()),
          input), extractOpRR);
      /*
       * c. Rebuilt the QueryDef.
       * Why?
       * - so that the ExprNodeDescriptors in the QueryDef are based on the
       * Extract Operator's RowResolver
       */
      rr = opParseCtx.get(input).getRowResolver();
      ptfDesc = translatePTFInvocationSpec(ptfQSpec, rr);
      /*
       * d. Construct PTF Operator.
       */
      RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
      input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
          new RowSchema(ptfOpRR.getColumnInfos()),
          input), ptfOpRR);
    }
    return input;
  }
//--------------------------- Windowing handling: PTFInvocationSpec to PTFDesc --------------------
  /**
   * Generates the operator plan for a query's windowing clauses. The
   * WindowingSpec is split into groups (via WindowingComponentizer), and for
   * each group a ReduceSink + PTF operator pair is appended to the input.
   * The loop mutates input and rr in lockstep so each group builds on the
   * previous group's output; returns the last operator appended.
   */
  Operator genWindowingPlan(WindowingSpec wSpec, Operator input) throws SemanticException {
    wSpec.validateAndMakeEffective();
    WindowingComponentizer groups = new WindowingComponentizer(wSpec);
    RowResolver rr = opParseCtx.get(input).getRowResolver();
    while(groups.hasNext() ) {
      // next() yields the spec for one group of compatible window functions.
      wSpec = groups.next(conf, this, unparseTranslator, rr);
      input = genReduceSinkPlanForWindowing(wSpec, rr, input);
      rr = opParseCtx.get(input).getRowResolver();
      PTFTranslator translator = new PTFTranslator();
      PTFDesc ptfDesc = translator.translate(wSpec, this, conf, rr, unparseTranslator);
      RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
      input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
          new RowSchema(ptfOpRR.getColumnInfos()),
          input), ptfOpRR);
      rr = ptfOpRR;
    }
    return input;
  }
/**
 * Builds the shuffle preceding a windowing PTF operator: a ReduceSink keyed
 * on the partition columns and sorted on partition + order columns, followed
 * by an Extract operator that reconstitutes rows on the reduce side.
 *
 * @param spec    windowing component supplying partition/order expressions
 * @param inputRR row resolver of {@code input}
 * @param input   operator whose output is shuffled
 * @return the reduce-side Extract operator
 * @throws SemanticException if expression translation fails
 */
private Operator genReduceSinkPlanForWindowing(WindowingSpec spec,
RowResolver inputRR,
Operator input) throws SemanticException{
ArrayList<ExprNodeDesc> partCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> orderCols = new ArrayList<ExprNodeDesc>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
List<String> outputColumnNames = new ArrayList<String>();
// One '+'/'-' per order column: the ReduceSink's sort-direction string.
StringBuilder orderString = new StringBuilder();
// Partition columns also participate in ordering, always ascending.
ArrayList<PartitionExpression> partColList = spec.getQueryPartitionSpec().getExpressions();
for (PartitionExpression partCol : partColList) {
ExprNodeDesc partExpr = genExprNodeDesc(partCol.getExpression(), inputRR);
partCols.add(partExpr);
orderCols.add(partExpr);
orderString.append('+');
}
// Explicit ORDER BY columns; the order spec may be absent.
ArrayList<OrderExpression> orderColList = spec.getQueryOrderSpec() == null ?
new ArrayList<PTFInvocationSpec.OrderExpression>() :
spec.getQueryOrderSpec().getExpressions();
for (int i = 0; i < orderColList.size(); i++) {
OrderExpression orderCol = orderColList.get(i);
org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order order = orderCol.getOrder();
if (order.name().equals("ASC")) {
orderString.append('+');
} else {
orderString.append('-');
}
ExprNodeDesc orderExpr = genExprNodeDesc(orderCol.getExpression(), inputRR);
orderCols.add(orderExpr);
}
// Forward every input column as a value column; rename them to internal
// _colN names and build the ReduceSink's RowResolver (rsNewRR), keeping
// both the original aliases and any alternate mappings.
ArrayList<ColumnInfo> colInfoList = inputRR.getColumnInfos();
RowResolver rsNewRR = new RowResolver();
int pos = 0;
for (ColumnInfo colInfo : colInfoList) {
ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo.getType(), colInfo
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol());
valueCols.add(valueColExpr);
String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
outputColumnNames.add(internalName);
colExprMap.put(internalName, valueColExpr);
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
ColumnInfo newColInfo = new ColumnInfo(
internalName, colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
rsNewRR.put(alias[0], alias[1], newColInfo);
String[] altMapping = inputRR.getAlternateMappings(colInfo.getInternalName());
if ( altMapping != null ) {
rsNewRR.put(altMapping[0], altMapping[1], newColInfo);
}
}
// Create the ReduceSink: -1 for numReducers/numDistributionKeys defaults.
input = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
.getReduceSinkDesc(orderCols,
valueCols, outputColumnNames, false,
-1, partCols, orderString.toString(), -1),
new RowSchema(rsNewRR.getColumnInfos()), input), rsNewRR);
input.setColumnExprMap(colExprMap);
// Construct the RR for extract operator
RowResolver extractRR = new RowResolver();
LinkedHashMap<String[], ColumnInfo> colsAddedByHaving =
new LinkedHashMap<String[], ColumnInfo>();
pos = 0;
for (ColumnInfo colInfo : colInfoList) {
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
/*
* if we have already encountered this colInfo internalName.
* We encounter it again because it must be put for the Having clause.
* We will add these entries in the end; in a loop on colsAddedByHaving. See below.
*/
if ( colsAddedByHaving.containsKey(alias)) {
continue;
}
ASTNode astNode = PTFTranslator.getASTNode(colInfo, inputRR);
ColumnInfo eColInfo = new ColumnInfo(
SemanticAnalyzer.getColumnInternalName(pos++), colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
if ( astNode == null ) {
extractRR.put(alias[0], alias[1], eColInfo);
}
else {
/*
* in case having clause refers to this column may have been added twice;
* once with the ASTNode.toStringTree as the alias
* and then with the real alias.
*/
extractRR.putExpression(astNode, eColInfo);
if ( !astNode.toStringTree().toLowerCase().equals(alias[1]) ) {
colsAddedByHaving.put(alias, eColInfo);
}
}
String[] altMapping = inputRR.getAlternateMappings(colInfo.getInternalName());
if ( altMapping != null ) {
extractRR.put(altMapping[0], altMapping[1], eColInfo);
}
}
// Deferred real-alias entries for columns referenced by HAVING (see above).
for(Map.Entry<String[], ColumnInfo> columnAddedByHaving : colsAddedByHaving.entrySet() ) {
String[] alias = columnAddedByHaving.getKey();
ColumnInfo eColInfo = columnAddedByHaving.getValue();
extractRR.put(alias[0], alias[1], eColInfo);
}
/*
* b. Construct Extract Operator.
*/
input = putOpInsertMap(OperatorFactory.getAndMakeChild(
new ExtractDesc(
new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
Utilities.ReduceField.VALUE
.toString(), "", false)),
new RowSchema(inputRR.getColumnInfos()),
input), extractRR);
return input;
}
/**
 * Parses a select-expression string (used for dynamic select lists) into a
 * list of WindowExpressionSpec, one per select expression.
 *
 * @param selectExprStr comma-separated select expressions
 * @return one spec per expression, each carrying an alias (explicit or derived)
 * @throws SemanticException on parse failure, on a child that is not a
 *         TOK_SELEXPR, or when a '*' column reference appears
 */
public static ArrayList<WindowExpressionSpec> parseSelect(String selectExprStr)
    throws SemanticException
{
  ASTNode selectNode;
  try {
    selectNode = new ParseDriver().parseSelect(selectExprStr, null);
  } catch (ParseException pe) {
    throw new SemanticException(pe);
  }

  ArrayList<WindowExpressionSpec> specs = new ArrayList<WindowExpressionSpec>();
  for (int childIdx = 0; childIdx < selectNode.getChildCount(); childIdx++) {
    ASTNode selExpr = (ASTNode) selectNode.getChild(childIdx);
    if (selExpr.getType() != HiveParser.TOK_SELEXPR) {
      throw new SemanticException(String.format(
          "Only Select expressions supported in dynamic select list: %s", selectExprStr));
    }
    ASTNode expr = (ASTNode) selExpr.getChild(0);
    if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
      throw new SemanticException(
          String.format("'%s' column not allowed in dynamic select list", selectExprStr));
    }
    // Prefer an explicit alias (second child, an Identifier token);
    // otherwise derive one from the expression itself.
    String alias;
    if (selExpr.getChildCount() > 1
        && selExpr.getChild(1).getType() == HiveParser.Identifier) {
      alias = ((ASTNode) selExpr.getChild(1)).getText();
    } else {
      String[] tabColAlias = getColAlias(selExpr, null, null, true, -1);
      alias = tabColAlias[1];
    }
    WindowExpressionSpec exprSpec = new WindowExpressionSpec();
    exprSpec.setAlias(alias);
    exprSpec.setExpression(expr);
    specs.add(exprSpec);
  }
  return specs;
}
/**
 * Adds alternate (tab_alias, col_alias) entries to the group-by RowResolver
 * so references like "b.x" or bare "x" both resolve after the GBY.
 * Handles two AST shapes: a DOT expression (tab.col) and a bare column ref.
 */
private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
Operator<? extends OperatorDesc> reduceSinkOp, RowResolver gByRR) {
if ( gByExpr.getType() == HiveParser.DOT
&& gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL ) {
// Shape: tab.col -- register under the explicit table alias.
String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
.getChild(0).getChild(0).getText());
String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
gByExpr.getChild(1).getText());
gByRR.put(tab_alias, col_alias, colInfo);
} else if ( gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
// Shape: bare column reference -- try to recover its table alias from
// the RR of the operator feeding the GBY pipeline.
String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
.getChild(0).getText());
String tab_alias = null;
/*
* If the input to the GBy has a tab alias for the column, then add an entry
* based on that tab_alias.
* For e.g. this query:
* select b.x, count(*) from t1 b group by x
* needs (tab_alias=b, col_alias=x) in the GBy RR.
* tab_alias=b comes from looking at the RowResolver that is the ancestor
* before any GBy/ReduceSinks added for the GBY operation.
*/
// Walk above the GBY/RS operators inserted for this aggregation.
Operator<? extends OperatorDesc> parent = reduceSinkOp;
while ( parent instanceof ReduceSinkOperator ||
parent instanceof GroupByOperator ) {
parent = parent.getParentOperators().get(0);
}
RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
try {
// NOTE(review): tab_alias is still null here, so this looks up by
// column alias only -- presumably RowResolver.get tolerates a null
// table alias; confirm against RowResolver's contract.
ColumnInfo pColInfo = parentRR.get(tab_alias, col_alias);
tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
} catch(SemanticException se) {
// Best-effort: if the lookup is ambiguous/fails, fall back to a null
// table alias rather than aborting the mapping.
}
gByRR.put(tab_alias, col_alias, colInfo);
}
}
/**
 * Determines the write type for a table load.
 *
 * @param ltd              descriptor of the load; its replace flag selects
 *                         overwrite vs. append semantics
 * @param isNonNativeTable whether the target is a non-native (storage-handler)
 *                         table
 * @return INSERT_OVERWRITE for non-native tables (characteristics unknown, so
 *         assume the most conservative case) or replacing loads; INSERT otherwise
 */
private WriteEntity.WriteType determineWriteType(LoadTableDesc ltd, boolean isNonNativeTable) {
  if (isNonNativeTable || ltd.getReplace()) {
    return WriteEntity.WriteType.INSERT_OVERWRITE;
  }
  return WriteEntity.WriteType.INSERT;
}
/**** Temporary Place Holder For Optiq plan Gen, Optimizer ****/

/*
 * Entry point to Optimizations using Optiq.
 */
// TODO: Extend QP to indicate LV, Multi Insert, Cubes, Rollups...
private boolean canHandleQuery() {
  // Restrict the Optiq planner to query shapes it currently supports:
  // a bounded join count, no outer joins, and none of the clause types
  // (cluster/distribute/sort by, windowing, PTF, scripts, multi-insert,
  // filters with subqueries) that this path cannot translate yet.
  return (queryProperties.getJoinCount() < HiveConf.getIntVar(conf,
          HiveConf.ConfVars.HIVE_CBO_MAX_JOINS_SUPPORTED))
      && (queryProperties.getOuterJoinCount() == 0)
      && !queryProperties.hasClusterBy()
      && !queryProperties.hasDistributeBy()
      && !queryProperties.hasSortBy()
      && !queryProperties.hasWindowing()
      && !queryProperties.hasPTF()
      && !queryProperties.usesScript()
      && !queryProperties.hasMultiDestQuery()
      && !queryProperties.hasFilterWithSubQuery();
}
/**
 * Optiq (Calcite) based planner: generates an Optiq logical plan from the QB,
 * runs the Volcano planner over it, and converts the best plan back to a
 * Hive AST (see getOptimizedAST()/apply()).
 */
private class OptiqBasedPlanner implements Frameworks.PlannerAction<RelNode> {
RelOptCluster m_cluster;
RelOptSchema m_relOptSchema;
SchemaPlus m_rootSchema;
// Semantic error captured during apply(); rethrown by getOptimizedAST().
SemanticException m_semanticException;
// TODO: Do we need to keep track of RR, ColNameToPosMap for every op or
// just last one.
// RowResolver for each generated RelNode.
LinkedHashMap<RelNode, RowResolver> m_relToHiveRR = new LinkedHashMap<RelNode, RowResolver>();
// Per RelNode: Hive column name -> Optiq field position.
LinkedHashMap<RelNode, ImmutableMap<String, Integer>> m_relToHiveColNameOptiqPosMap = new LinkedHashMap<RelNode, ImmutableMap<String, Integer>>();
/**
 * Runs the Optiq planner (via {@code Frameworks.withPlanner(this)}) and
 * converts the optimized relational plan back into a Hive AST.
 *
 * @return the AST rebuilt from the best plan found by the planner
 * @throws SemanticException if plan generation failed with a semantic error
 *         recorded in {@code m_semanticException} by {@link #apply}
 */
private ASTNode getOptimizedAST() throws SemanticException {
  final RelNode bestPlan;
  try {
    bestPlan = Frameworks.withPlanner(this);
  } catch (Exception e) {
    // apply() stashes semantic failures in m_semanticException and rethrows
    // unchecked; surface the original SemanticException when present.
    if (m_semanticException != null) {
      throw m_semanticException;
    }
    throw new RuntimeException(e);
  }
  return ASTConverter.convert(bestPlan, resultSchema);
}
/**
 * Frameworks.PlannerAction callback: builds the initial Optiq plan from the
 * QB, registers metadata providers and Hive rewrite rules, and asks the
 * Volcano planner for the best plan in Hive convention.
 */
@Override
public RelNode apply(RelOptCluster cluster, RelOptSchema relOptSchema,
SchemaPlus rootSchema) {
RelOptPlanner planner = HiveVolcanoPlanner.createPlanner();
/*
* recreate cluster, so that it picks up the additional traitDef
*/
final RelOptQuery query = new RelOptQuery(planner);
final RexBuilder rexBuilder = cluster.getRexBuilder();
cluster = query.createCluster(rexBuilder.getTypeFactory(), rexBuilder);
m_cluster = cluster;
m_relOptSchema = relOptSchema;
m_rootSchema = rootSchema;
// 1. Build the unoptimized logical plan from the QB tree.
RelNode optiqPlan = null;
try {
optiqPlan = genLogicalPlan(qb);
} catch (SemanticException e) {
// The callback cannot throw checked exceptions; stash the semantic
// error so getOptimizedAST() can rethrow it with its original type.
m_semanticException = e;
throw new RuntimeException(e);
}
// 2. Wire up (cached) relational metadata providers.
List<RelMetadataProvider> list = Lists.newArrayList();
list.add(HiveDefaultRelMetadataProvider.INSTANCE);
planner.registerMetadataProviders(list);
RelMetadataProvider chainedProvider = ChainedRelMetadataProvider.of(list);
cluster.setMetadataProvider(new CachingRelMetadataProvider(
chainedProvider, planner));
// 3. Register join-reordering and (optionally) project-pull-up rules.
planner.addRule(HiveSwapJoinRule.INSTANCE);
planner.addRule(HivePushJoinThroughJoinRule.LEFT);
planner.addRule(HivePushJoinThroughJoinRule.RIGHT);
if (HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_CBO_PULLPROJECTABOVEJOIN_RULE)) {
planner.addRule(HivePullUpProjectsAboveJoinRule.BOTH_PROJECT);
planner.addRule(HivePullUpProjectsAboveJoinRule.LEFT_PROJECT);
planner.addRule(HivePullUpProjectsAboveJoinRule.RIGHT_PROJECT);
planner.addRule(HiveMergeProjectRule.INSTANCE);
}
// 4. Convert the root to Hive convention if needed and optimize.
RelTraitSet desiredTraits = cluster.traitSetOf(HiveRel.CONVENTION,
RelCollationImpl.EMPTY);
RelNode rootRel = optiqPlan;
if (!optiqPlan.getTraitSet().equals(desiredTraits)) {
rootRel = planner.changeTraits(optiqPlan, desiredTraits);
}
planner.setRoot(rootRel);
return planner.findBestExp();
}
/**
 * Placeholder for UNION plan generation on the Optiq path.
 * NOTE: not implemented -- always returns {@code null}; union queries are
 * expected to be filtered out before reaching this planner.
 */
private RelNode genUnionLogicalPlan(String unionalias, String leftalias,
RelNode leftOp, String rightalias, RelNode rightOp) {
return null;
}
/**
 * Builds a HiveJoinRel over two child RelNodes.
 * Combines the children's RowResolvers (left-only for LEFT SEMI join),
 * converts the join-condition AST into a RexNode (TRUE when absent), maps
 * the Hive join type to an Optiq JoinRelType, and registers the new rel's
 * RR and column-position map.
 *
 * @param joinCond join condition AST, may be null (cross join -> TRUE)
 * @throws SemanticException if condition translation fails
 */
private RelNode genJoinRelNode(RelNode leftRel, RelNode rightRel,
JoinType hiveJoinType, ASTNode joinCond) throws SemanticException {
RelNode joinRel = null;
// 1. construct the RowResolver for the new Join Node by combining row
// resolvers from left, right
RowResolver leftRR = this.m_relToHiveRR.get(leftRel);
RowResolver rightRR = this.m_relToHiveRR.get(rightRel);
RowResolver joinRR = null;
if (hiveJoinType != JoinType.LEFTSEMI) {
joinRR = RowResolver.getCombinedRR(leftRR, rightRR);
} else {
// LEFT SEMI join only exposes the left side's columns.
joinRR = new RowResolver();
RowResolver.add(joinRR, leftRR, 0);
}
// 2. Construct ExpressionNodeDesc representing Join Condition
RexNode optiqJoinCond = null;
if (joinCond != null) {
Map<ASTNode, ExprNodeDesc> exprNodes = JoinCondnTypeCheckProcFactory
.genExprNode(joinCond, new JoinTypeCheckCtx(leftRR, rightRR));
ExprNodeDesc joinCondnExprNode = exprNodes.get(joinCond);
List<RelNode> inputRels = new ArrayList<RelNode>();
inputRels.add(leftRel);
inputRels.add(rightRel);
optiqJoinCond = RexNodeConverter.convert(m_cluster, joinCondnExprNode,
inputRels, m_relToHiveRR, m_relToHiveColNameOptiqPosMap, false);
} else {
// No ON clause: use an always-true predicate.
optiqJoinCond = RexNodeConverter.getAlwaysTruePredicate(m_cluster);
}
// 3. Validate that join condition is legal (i.e no function refering to
// both sides of join, only equi join)
// TODO: Join filter handling (only supported for OJ by runtime or is it
// supported for IJ as well)
// 4. Construct Join Rel Node
boolean leftSemiJoin = false;
JoinRelType optiqJoinType;
switch (hiveJoinType) {
case LEFTOUTER:
optiqJoinType = JoinRelType.LEFT;
break;
case RIGHTOUTER:
optiqJoinType = JoinRelType.RIGHT;
break;
case FULLOUTER:
optiqJoinType = JoinRelType.FULL;
break;
case LEFTSEMI:
// Modeled as INNER with the leftSemiJoin flag set on the HiveJoinRel.
optiqJoinType = JoinRelType.INNER;
leftSemiJoin = true;
break;
case INNER:
default:
optiqJoinType = JoinRelType.INNER;
break;
}
joinRel = HiveJoinRel.getJoin(m_cluster, leftRel, rightRel,
optiqJoinCond, optiqJoinType, leftSemiJoin);
// 5. Add new JoinRel & its RR to the maps
m_relToHiveColNameOptiqPosMap.put(joinRel,
this.buildHiveToOptiqColumnMap(joinRR, joinRel));
m_relToHiveRR.put(joinRel, joinRR);
return joinRel;
}
/**
 * Generate Join Logical Plan Relnode by walking through the join AST.
 *
 * @param joinParseTree
 *          join AST node (TOK_*JOIN)
 * @param aliasToRel
 *          Alias(Table/Relation alias) to RelNode; only read and not
 *          written in to by this method
 * @return the join RelNode
 * @throws SemanticException
 */
private RelNode genJoinLogicalPlan(ASTNode joinParseTree,
    Map<String, RelNode> aliasToRel) throws SemanticException {
  RelNode leftRel = null;
  RelNode rightRel = null;
  JoinType hiveJoinType = null;

  if (joinParseTree.getToken().getType() == HiveParser.TOK_UNIQUEJOIN) {
    throw new RuntimeException("CBO does not support Unique Join");
  }

  // 1. Determine Join Type
  // TODO: What about TOK_CROSSJOIN, TOK_MAPJOIN
  switch (joinParseTree.getToken().getType()) {
  case HiveParser.TOK_LEFTOUTERJOIN:
    hiveJoinType = JoinType.LEFTOUTER;
    break;
  case HiveParser.TOK_RIGHTOUTERJOIN:
    hiveJoinType = JoinType.RIGHTOUTER;
    break;
  case HiveParser.TOK_FULLOUTERJOIN:
    hiveJoinType = JoinType.FULLOUTER;
    break;
  case HiveParser.TOK_LEFTSEMIJOIN:
    hiveJoinType = JoinType.LEFTSEMI;
    break;
  default:
    hiveJoinType = JoinType.INNER;
    break;
  }

  // 2. Get Left Table Alias (the left side may itself be a nested join)
  ASTNode left = (ASTNode) joinParseTree.getChild(0);
  if (isDirectTableSource(left)) {
    leftRel = lookupTableSourceRel(left, aliasToRel);
  } else if (isJoinToken(left)) {
    leftRel = genJoinLogicalPlan(left, aliasToRel);
  } else {
    assert (false);
  }

  // 3. Get Right Table Alias (nested joins are not expected on the right)
  ASTNode right = (ASTNode) joinParseTree.getChild(1);
  if (isDirectTableSource(right)) {
    rightRel = lookupTableSourceRel(right, aliasToRel);
  } else {
    assert (false);
  }

  // 4. Get Join Condn
  ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);

  // 5. Create Join rel
  return genJoinRelNode(leftRel, rightRel, hiveJoinType, joinCond);
}

/** Returns true when the node is a direct table source: table ref, subquery or PTF call. */
private boolean isDirectTableSource(ASTNode node) {
  int tokenType = node.getToken().getType();
  return tokenType == HiveParser.TOK_TABREF
      || tokenType == HiveParser.TOK_SUBQUERY
      || tokenType == HiveParser.TOK_PTBLFUNCTION;
}

/**
 * Resolves the alias of a table/subquery/PTF source node and returns its
 * previously-registered RelNode from {@code aliasToRel} (null if absent).
 */
private RelNode lookupTableSourceRel(ASTNode node, Map<String, RelNode> aliasToRel) {
  String tableName = getUnescapedUnqualifiedTableName(
      (ASTNode) node.getChild(0)).toLowerCase();
  String tableAlias = node.getChildCount() == 1 ? tableName
      : unescapeIdentifier(node.getChild(node.getChildCount() - 1)
          .getText().toLowerCase());
  // ptf node form is: ^(TOK_PTBLFUNCTION $name $alias?
  // partitionTableFunctionSource partitioningSpec? expression*)
  // guaranteed to have an alias here: check done in processJoin
  if (node.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) {
    tableAlias = unescapeIdentifier(node.getChild(1).getText().toLowerCase());
  }
  return aliasToRel.get(tableAlias);
}
/**
 * Builds a HiveTableScanRel for a query table alias.
 * Assembles the table's logical schema (non-partition columns from the
 * deserializer's ObjectInspector, then partition columns, then virtual
 * columns), wraps the metastore Table in a RelOptHiveTable, and registers
 * the scan's RowResolver and column-position map.
 * NOTE: any failure is rethrown as an unchecked RuntimeException (the
 * Optiq callback path cannot throw checked exceptions).
 */
private RelNode genTableLogicalPlan(String tableAlias, QB qb) {
RowResolver rr = new RowResolver();
HiveTableScanRel tableRel = null;
try {
// 1. Get Table Alias
String alias_id = getAliasId(tableAlias, qb);
// 2. Get Table Metadata
Table tab = qb.getMetaData().getSrcForAlias(tableAlias);
// 3. Get Table Logical Schema (Row Type)
// NOTE: Table logical schema = Non Partition Cols + Partition Cols +
// Virtual Cols
// 3.1 Add Column info for non partion cols (Object Inspector fields)
StructObjectInspector rowObjectInspector = (StructObjectInspector) tab
.getDeserializer().getObjectInspector();
List<? extends StructField> fields = rowObjectInspector
.getAllStructFieldRefs();
ColumnInfo colInfo;
String colName;
ArrayList<ColumnInfo> cInfoLst = new ArrayList<ColumnInfo>();
for (int i = 0; i < fields.size(); i++) {
colName = fields.get(i).getFieldName();
colInfo = new ColumnInfo(fields.get(i).getFieldName(),
TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
.getFieldObjectInspector()), tableAlias, false);
colInfo.setSkewedCol((isSkewedCol(tableAlias, qb, colName)) ? true
: false);
rr.put(tableAlias, colName, colInfo);
cInfoLst.add(colInfo);
}
// TODO: Fix this
// Stats are requested only for the non-partition columns captured so far.
ArrayList<ColumnInfo> columnsThatNeedsStats = new ArrayList<ColumnInfo>(
cInfoLst);
// 3.2 Add column info corresponding to partition columns
for (FieldSchema part_col : tab.getPartCols()) {
colName = part_col.getName();
colInfo = new ColumnInfo(colName,
TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()),
tableAlias, true);
rr.put(tableAlias, colName, colInfo);
cInfoLst.add(colInfo);
}
// 3.3 Add column info corresponding to virtual columns
Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf)
.iterator();
while (vcs.hasNext()) {
VirtualColumn vc = vcs.next();
colInfo = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias,
true, vc.getIsHidden());
rr.put(tableAlias, vc.getName(), colInfo);
cInfoLst.add(colInfo);
}
// 3.4 Build row type from field <type, name>
RelDataType rowType = TypeConverter.getType(m_cluster, rr, null);
// 4. Build RelOptAbstractTable
RelOptHiveTable optTable = new RelOptHiveTable(m_relOptSchema,
tableAlias, rowType, tab, columnsThatNeedsStats);
// 5. Build Hive Table Scan Rel
tableRel = new HiveTableScanRel(m_cluster,
m_cluster.traitSetOf(HiveRel.CONVENTION), optTable, rowType);
// 6. Add Schema(RR) to RelNode-Schema map
ImmutableMap<String, Integer> hiveToOptiqColMap = buildHiveToOptiqColumnMap(
rr, tableRel);
m_relToHiveRR.put(tableRel, rr);
m_relToHiveColNameOptiqPosMap.put(tableRel, hiveToOptiqColMap);
} catch (Exception e) {
throw (new RuntimeException(e));
}
return tableRel;
}
/**
 * Builds a HiveFilterRel on top of {@code srcRel} from a filter-expression
 * AST. The filter does not change the schema, so the source's RowResolver
 * and column-position map are propagated to the new rel.
 *
 * @param filterExpr predicate AST to translate
 * @param srcRel     input rel the filter applies to
 * @return the new filter RelNode
 * @throws SemanticException if expression translation fails
 */
private RelNode genFilterRelNode(ASTNode filterExpr, RelNode srcRel)
    throws SemanticException {
  // Translate the AST against the source's RowResolver, then convert the
  // resulting ExprNodeDesc into an Optiq RexNode over the source row type.
  ExprNodeDesc filterCondn = genExprNodeDesc(filterExpr,
      m_relToHiveRR.get(srcRel));
  ImmutableMap<String, Integer> hiveColNameOptiqPosMap = this.m_relToHiveColNameOptiqPosMap
      .get(srcRel);
  RexNode convertedFilterExpr = new RexNodeConverter(m_cluster,
      srcRel.getRowType(), hiveColNameOptiqPosMap, 0, true)
      .convert(filterCondn);
  RelNode filterRel = new HiveFilterRel(m_cluster,
      m_cluster.traitSetOf(HiveRel.CONVENTION), srcRel, convertedFilterExpr);
  // Register schema bookkeeping for the new rel. (The original code put the
  // position map twice with identical key/value; once is sufficient.)
  m_relToHiveColNameOptiqPosMap.put(filterRel, hiveColNameOptiqPosMap);
  m_relToHiveRR.put(filterRel, m_relToHiveRR.get(srcRel));
  return filterRel;
}
/**
 * Generates the WHERE-clause plan for {@code qb}, if one exists.
 * Only the first destination's where-expression is considered (multi-insert
 * is not supported on this path).
 *
 * @return the filter RelNode, or {@code null} when the query has no WHERE
 * @throws SemanticException if predicate translation fails
 */
private RelNode genFilterLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  Iterator<ASTNode> whereIt = getQBParseInfo(qb)
      .getDestToWhereExpr().values().iterator();
  if (!whereIt.hasNext()) {
    return null;
  }
  // TOK_WHERE's first child is the actual predicate expression.
  ASTNode predicate = (ASTNode) whereIt.next().getChild(0);
  return genFilterRelNode(predicate, srcRel);
}
// Maps Hive UDAF names to Optiq aggregation functions. UDAFs with no
// builtin Optiq counterpart (e.g. stddev_samp) are wrapped through
// SqlFunctionConverter.hiveAggFunction.
private final Map<String, Aggregation> AGG_MAP = ImmutableMap
    .<String, Aggregation> builder()
    .put("count", (Aggregation) SqlStdOperatorTable.COUNT)
    .put("sum", SqlStdOperatorTable.SUM)
    .put("min", SqlStdOperatorTable.MIN)
    .put("max", SqlStdOperatorTable.MAX)
    .put("avg", SqlStdOperatorTable.AVG)
    .put("stddev_samp", SqlFunctionConverter.hiveAggFunction("stddev_samp"))
    .build();
/**
 * Class to store GenericUDAF related information.
 * Immutable holder describing one aggregation call: its UDAF name, the
 * parameter expressions, the resolved return type, and the DISTINCT flag.
 */
private class AggInfo {
// Parameter expressions passed to the UDAF.
private final List<ExprNodeDesc> m_aggParams;
// Return type resolved by the UDAF evaluator.
private final TypeInfo m_returnType;
// Hive UDAF name (e.g. "count", "sum"); key into AGG_MAP.
private final String m_udfName;
// Whether the call is aggregate-DISTINCT (e.g. count(DISTINCT x)).
private final boolean m_distinct;
private AggInfo(List<ExprNodeDesc> aggParams, TypeInfo returnType,
String udfName, boolean isDistinct) {
m_aggParams = aggParams;
m_returnType = returnType;
m_udfName = udfName;
m_distinct = isDistinct;
}
}
/**
 * Converts one AggInfo into an Optiq AggregateCall.
 * Aggregate arguments must be projections from the child, so each argument
 * RexNode is looked up in {@code rexNodeToPosMap}; unseen arguments are
 * appended to {@code gbChildProjLst} (mutated!) at position
 * {@code childProjLstIndx} onward and recorded in the map.
 *
 * @param childProjLstIndx next free position in the child project list
 * @throws AssertionError (unchecked) when the UDAF has no AGG_MAP entry
 */
private AggregateCall convertAgg(AggInfo agg, RelNode input,
List<RexNode> gbChildProjLst, RexNodeConverter converter,
HashMap<RexNode, Integer> rexNodeToPosMap, Integer childProjLstIndx)
throws SemanticException {
final Aggregation aggregation = AGG_MAP.get(agg.m_udfName);
if (aggregation == null) {
throw new AssertionError("agg not found: " + agg.m_udfName);
}
List<Integer> argList = new ArrayList<Integer>();
RelDataType type = TypeConverter.convert(agg.m_returnType,
this.m_cluster.getTypeFactory());
// TODO: Does HQL allows expressions as aggregate args or can it only be
// projections from child?
Integer inputIndx;
RexNode rexNd = null;
for (ExprNodeDesc expr : agg.m_aggParams) {
rexNd = converter.convert(expr);
inputIndx = rexNodeToPosMap.get(rexNd);
if (inputIndx == null) {
// Argument not yet projected by the child: append it.
gbChildProjLst.add(rexNd);
rexNodeToPosMap.put(rexNd, childProjLstIndx);
inputIndx = childProjLstIndx;
childProjLstIndx++;
}
argList.add(inputIndx);
}
/*
* set the type to the first arg, it there is one; because the RTi set on
* Aggregation call assumes this is the output type.
*/
if (argList.size() > 0) {
RexNode rex = converter.convert(agg.m_aggParams.get(0));
type = rex.getType();
}
return new AggregateCall(aggregation, agg.m_distinct, argList, type, null);
}
/**
 * Builds the group-by RelNode: a child HiveProjectRel supplying the GB keys
 * (first) and any aggregate arguments, topped by a HiveAggregateRel.
 *
 * @param gbExprs    group-by key expressions
 * @param aggInfoLst aggregation calls to evaluate
 * @param srcRel     input relational node
 * @return the aggregate RelNode
 * @throws SemanticException wrapping InvalidRelException from rel creation
 */
private RelNode genGBRelNode(List<ExprNodeDesc> gbExprs,
    List<AggInfo> aggInfoLst, RelNode srcRel) throws SemanticException {
  // (Removed dead locals: the input RR's signature and an unused size count
  // were fetched but never used.)
  ImmutableMap<String, Integer> posMap = this.m_relToHiveColNameOptiqPosMap
      .get(srcRel);
  RexNodeConverter converter = new RexNodeConverter(this.m_cluster,
      srcRel.getRowType(), posMap, 0, false);

  // Child project list: GB keys first, then aggregate args added on demand
  // by convertAgg (which consults rexNodeToPosMap to avoid duplicates).
  final List<RexNode> gbChildProjLst = Lists.newArrayList();
  final HashMap<RexNode, Integer> rexNodeToPosMap = new HashMap<RexNode, Integer>();
  final BitSet groupSet = new BitSet();
  Integer gbIndx = 0;
  RexNode rnd;
  for (ExprNodeDesc key : gbExprs) {
    rnd = converter.convert(key);
    gbChildProjLst.add(rnd);
    groupSet.set(gbIndx);
    rexNodeToPosMap.put(rnd, gbIndx);
    gbIndx++;
  }
  List<AggregateCall> aggregateCalls = Lists.newArrayList();
  for (AggInfo agg : aggInfoLst) {
    aggregateCalls.add(convertAgg(agg, srcRel, gbChildProjLst, converter,
        rexNodeToPosMap, gbChildProjLst.size()));
  }

  RelNode gbInputRel = HiveProjectRel.create(srcRel, gbChildProjLst, null);

  HiveRel aggregateRel = null;
  try {
    aggregateRel = new HiveAggregateRel(m_cluster,
        m_cluster.traitSetOf(HiveRel.CONVENTION), gbInputRel, groupSet,
        aggregateCalls);
  } catch (InvalidRelException e) {
    throw new SemanticException(e);
  }
  return aggregateRel;
}
/**
 * CBO-path variant of addAlternateGByKeyMappings: adds alternate
 * (tab_alias, col_alias) entries to the group-by output RowResolver so that
 * both qualified ("b.x") and bare ("x") references resolve after the GBY.
 */
private void addAlternateGByKeyMappings(ASTNode gByExpr,
ColumnInfo colInfo, RowResolver gByInputRR, RowResolver gByRR) {
if (gByExpr.getType() == HiveParser.DOT
&& gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) {
// Shape: tab.col -- register under the explicit table alias.
String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
.getChild(0).getChild(0).getText());
String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
.getChild(1).getText());
gByRR.put(tab_alias, col_alias, colInfo);
} else if (gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL) {
// Shape: bare column reference -- recover the table alias from the
// GBy input's RowResolver when possible.
String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
.getChild(0).getText());
String tab_alias = null;
/*
* If the input to the GBy has a tab alias for the column, then add an
* entry based on that tab_alias. For e.g. this query: select b.x,
* count(*) from t1 b group by x needs (tab_alias=b, col_alias=x) in the
* GBy RR. tab_alias=b comes from looking at the RowResolver that is the
* ancestor before any GBy/ReduceSinks added for the GBY operation.
*/
try {
// NOTE(review): tab_alias is null at this point, so this is a lookup
// by column alias only -- presumably RowResolver.get accepts a null
// table alias; confirm against RowResolver's contract.
ColumnInfo pColInfo = gByInputRR.get(tab_alias, col_alias);
tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
} catch (SemanticException se) {
// Best-effort: on ambiguity/failure fall back to a null table alias.
}
gByRR.put(tab_alias, col_alias, colInfo);
}
}
/**
 * Generate GB plan.
 * Translates the query's GROUP BY keys and aggregation calls (from the
 * first -- and only supported -- destination clause) into ExprNodeDescs and
 * AggInfos, builds the aggregate rel via genGBRelNode, and registers the
 * output RowResolver / column-position map.
 *
 * @param qb
 * @param srcRel
 * @return the group-by RelNode, or {@code null} when the query has no
 *         GROUP BY keys. TODO: 1. Grouping Sets (roll up..)
 * @throws SemanticException
 */
private RelNode genGBLogicalPlan(QB qb, RelNode srcRel)
throws SemanticException {
RelNode gbRel = null;
QBParseInfo qbp = getQBParseInfo(qb);
// 1. Gather GB Expressions (AST)
// NOTE: Multi Insert is not supported
String detsClauseName = qbp.getClauseNames().iterator().next();
List<ASTNode> grpByAstExprs = getGroupByForClause(qbp, detsClauseName);
if (grpByAstExprs != null && !grpByAstExprs.isEmpty()) {
// 2. Input, Output Row Resolvers
RowResolver groupByInputRowResolver = this.m_relToHiveRR.get(srcRel);
RowResolver groupByOutputRowResolver = new RowResolver();
groupByOutputRowResolver.setIsExprResolver(true);
// 3. Construct GB Keys (ExprNode)
ArrayList<ExprNodeDesc> gbExprNDescLst = new ArrayList<ExprNodeDesc>();
ArrayList<String> outputColumnNames = new ArrayList<String>();
for (int i = 0; i < grpByAstExprs.size(); ++i) {
ASTNode grpbyExpr = grpByAstExprs.get(i);
Map<ASTNode, ExprNodeDesc> astToExprNDescMap = TypeCheckProcFactory
.genExprNode(grpbyExpr, new TypeCheckCtx(groupByInputRowResolver));
ExprNodeDesc grpbyExprNDesc = astToExprNDescMap.get(grpbyExpr);
if (grpbyExprNDesc == null)
throw new RuntimeException("Invalid Column Reference: "
+ grpbyExpr.dump());
gbExprNDescLst.add(grpbyExprNDesc);
// TODO: Should we use grpbyExprNDesc.getTypeInfo()? what if expr is
// UDF
String field = getColumnInternalName(i);
outputColumnNames.add(field);
ColumnInfo oColInfo = new ColumnInfo(field,
grpbyExprNDesc.getTypeInfo(), null, false);
groupByOutputRowResolver.putExpression(grpbyExpr, oColInfo);
// TODO: Alternate mappings, are they necessary?
addAlternateGByKeyMappings(grpbyExpr, oColInfo,
groupByInputRowResolver, groupByOutputRowResolver);
}
// 4. Construct aggregation function Info
ArrayList<AggInfo> aggregations = new ArrayList<AggInfo>();
HashMap<String, ASTNode> aggregationTrees = qbp
.getAggregationExprsForClause(detsClauseName);
assert (aggregationTrees != null);
int numDistinctUDFs = 0;
for (ASTNode value : aggregationTrees.values()) {
// 4.1 Convert UDF Params to ExprNodeDesc
// Child 0 is the function name; params start at child 1.
ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
for (int i = 1; i < value.getChildCount(); i++) {
ASTNode paraExpr = (ASTNode) value.getChild(i);
ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr,
groupByInputRowResolver);
aggParameters.add(paraExprNode);
}
// 4.2 Determine type of UDF
// This is the GenericUDAF name
String aggName = unescapeIdentifier(value.getChild(0).getText());
boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
if (isDistinct) {
numDistinctUDFs++;
}
// Resolve the UDAF's evaluator/return type in COMPLETE mode.
Mode amode = groupByDescModeToUDAFMode(GroupByDesc.Mode.COMPLETE,
isDistinct);
GenericUDAFEvaluator genericUDAFEvaluator = getGenericUDAFEvaluator(
aggName, aggParameters, value, isDistinct, isAllColumns);
assert (genericUDAFEvaluator != null);
GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator,
amode, aggParameters);
AggInfo aInfo = new AggInfo(aggParameters, udaf.returnType, aggName,
isDistinct);
aggregations.add(aInfo);
// Aggregates are named after the GB keys: _col<numKeys + aggIndex>.
String field = getColumnInternalName(gbExprNDescLst.size()
+ aggregations.size() - 1);
outputColumnNames.add(field);
groupByOutputRowResolver.putExpression(value, new ColumnInfo(field,
aInfo.m_returnType, "", false));
}
gbRel = genGBRelNode(gbExprNDescLst, aggregations, srcRel);
m_relToHiveColNameOptiqPosMap.put(gbRel,
buildHiveToOptiqColumnMap(groupByOutputRowResolver, gbRel));
this.m_relToHiveRR.put(gbRel, groupByOutputRowResolver);
}
return gbRel;
}
/**
 * Builds the ORDER BY plan: a HiveSortRel with the requested field
 * collations, plus (when an order expression is not a plain column) a child
 * Project introducing virtual columns and a parent Project stripping them
 * from the result.
 *
 * @return the resulting RelNode, or {@code null} when there is no ORDER BY
 * @throws SemanticException in strict mode when ORDER BY has no LIMIT, or
 *         when an order-by expression cannot be translated
 */
private RelNode genOBLogicalPlan(QB qb, RelNode srcRel)
throws SemanticException {
RelNode relToRet = null;
QBParseInfo qbp = getQBParseInfo(qb);
String dest = qbp.getClauseNames().iterator().next();
ASTNode obAST = qbp.getOrderByForClause(dest);
if (obAST != null) {
// 1. OB Expr sanity test
// in strict mode, in the presence of order by, limit must be specified
Integer limit = qb.getParseInfo().getDestLimit(dest);
if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
"strict")
&& limit == null) {
throw new SemanticException(generateErrorMessage(obAST,
ErrorMsg.NO_LIMIT_WITH_ORDERBY.getMsg()));
}
// 2. Walk through OB exprs and extract field collations and additional
// virtual columns needed
final List<RexNode> newVCLst = new ArrayList<RexNode>();
final List<RelFieldCollation> fieldCollations = Lists.newArrayList();
int fieldIndex = 0;
List<Node> obASTExprLst = obAST.getChildren();
ASTNode obASTExpr;
List<Pair<ASTNode, TypeInfo>> vcASTTypePairs = new ArrayList<Pair<ASTNode, TypeInfo>>();
RowResolver inputRR = m_relToHiveRR.get(srcRel);
RowResolver outputRR = new RowResolver();
RexNode rnd;
RexNodeConverter converter = new RexNodeConverter(m_cluster,
srcRel.getRowType(), m_relToHiveColNameOptiqPosMap.get(srcRel), 0,
false);
int srcRelRecordSz = srcRel.getRowType().getFieldCount();
for (int i = 0; i < obASTExprLst.size(); i++) {
// 2.1 Convert AST Expr to ExprNode
// Each OB child is a TOK_TABSORTCOLNAME*; child 0 is the expression.
obASTExpr = (ASTNode) obASTExprLst.get(i);
Map<ASTNode, ExprNodeDesc> astToExprNDescMap = TypeCheckProcFactory
.genExprNode(obASTExpr, new TypeCheckCtx(inputRR));
ExprNodeDesc obExprNDesc = astToExprNDescMap.get((ASTNode) obASTExpr
.getChild(0));
if (obExprNDesc == null)
throw new SemanticException("Invalid order by expression: "
+ obASTExpr.toString());
// 2.2 Convert ExprNode to RexNode
rnd = converter.convert(obExprNDesc);
// 2.3 Determine the index of ob expr in child schema
// NOTE: Optiq can not take compound exprs in OB without it being
// present in the child (& hence we add a child Project Rel)
if (rnd instanceof RexInputRef) {
fieldIndex = ((RexInputRef) rnd).getIndex();
} else {
// Compound expression: materialize it as a new virtual column
// appended after the source's own fields.
fieldIndex = srcRelRecordSz + newVCLst.size();
newVCLst.add(rnd);
vcASTTypePairs.add(new Pair<ASTNode, TypeInfo>((ASTNode) obASTExpr
.getChild(0), obExprNDesc.getTypeInfo()));
}
// 2.4 Determine the Direction of order by
// Default is DESCENDING unless the token says ascending.
org.eigenbase.rel.RelFieldCollation.Direction order = RelFieldCollation.Direction.DESCENDING;
if (obASTExpr.getType() == HiveParser.TOK_TABSORTCOLNAMEASC) {
order = RelFieldCollation.Direction.ASCENDING;
}
// 2.5 Add to field collations
fieldCollations.add(new RelFieldCollation(fieldIndex, order));
}
// 3. Add Child Project Rel if needed
// Projects all original fields plus the new virtual columns.
RelNode obInputRel = srcRel;
if (!newVCLst.isEmpty()) {
List<RexNode> originalInputRefs = Lists.transform(srcRel.getRowType()
.getFieldList(), new Function<RelDataTypeField, RexNode>() {
public RexNode apply(RelDataTypeField input) {
return new RexInputRef(input.getIndex(), input.getType());
}
});
obInputRel = HiveProjectRel.create(srcRel,
CompositeList.of(originalInputRefs, newVCLst), null);
}
// 4. Construct SortRel
RelTraitSet traitSet = m_cluster.traitSetOf(HiveRel.CONVENTION);
RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl
.of(fieldCollations));
// TODO: Is it better to introduce a
// project on top to restrict VC from showing up in sortRel type
RelNode sortRel = new HiveSortRel(m_cluster, traitSet, obInputRel,
canonizedCollation, null, null);
// 5. Construct OB Parent Rel If needed
// Construct a parent Project if OB has virtual columns(vc) otherwise
// vc would show up in the result
// TODO: If OB is part of sub query & Parent Query select is not of the
// type "select */.*..." then parent project is not needed
relToRet = sortRel;
if (!newVCLst.isEmpty()) {
// Project only the source's original fields, dropping the appended VCs.
List<RexNode> obParentRelProjs = Lists.transform(srcRel
.getRowType().getFieldList(),
new Function<RelDataTypeField, RexNode>() {
public RexNode apply(RelDataTypeField input) {
return new RexInputRef(input.getIndex(), input.getType());
}
});
relToRet = HiveProjectRel.create(sortRel, obParentRelProjs, null);
}
// 6. Construct output RR
RowResolver.add(outputRR, inputRR, 0);
// 7. Update the maps
// NOTE: Output RR for SortRel is considered same as its input; we may
// end up not using VC that is present in sort rel. Also note that
// rowtype of sortrel is the type of it child; if child happens to be
// synthetic project that we introduced then that projectrel would
// contain the vc.
ImmutableMap<String, Integer> hiveColNameOptiqPosMap = buildHiveToOptiqColumnMap(
outputRR, relToRet);
m_relToHiveRR.put(relToRet, outputRR);
m_relToHiveColNameOptiqPosMap.put(relToRet, hiveColNameOptiqPosMap);
}
return relToRet;
}
/**
 * Builds a HiveSortRel that enforces the LIMIT clause of the query block.
 *
 * @param qb     query block being translated
 * @param srcRel child rel the limit is applied on top of
 * @return the limit rel, or null when the query block has no LIMIT clause
 */
private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  QBParseInfo parseInfo = getQBParseInfo(qb);
  String clauseName = parseInfo.getClauseNames().iterator().next();
  Integer limit = parseInfo.getDestToLimit().get(clauseName);
  if (limit == null) {
    return null;
  }

  // Encode the limit as an exact-literal "fetch" expression; the collation
  // is empty because LIMIT by itself imposes no ordering.
  RexNode fetch = m_cluster.getRexBuilder().makeExactLiteral(
      BigDecimal.valueOf(limit));
  RelTraitSet traitSet = m_cluster.traitSetOf(HiveRel.CONVENTION);
  RelCollation emptyCollation = traitSet.canonize(RelCollationImpl.EMPTY);
  HiveRel limitRel = new HiveSortRel(m_cluster, traitSet, srcRel,
      emptyCollation, null, fetch);

  // LIMIT does not change the schema, so the output row resolver is a
  // straight copy of the child's.
  RowResolver outputRR = new RowResolver();
  RowResolver.add(outputRR, m_relToHiveRR.get(srcRel), 0);
  m_relToHiveRR.put(limitRel, outputRR);
  m_relToHiveColNameOptiqPosMap.put(limitRel,
      buildHiveToOptiqColumnMap(outputRR, limitRel));
  return limitRel;
}
/**
 * Generates the Optiq Project rel for the SELECT clause of the query block:
 * resolves star/regex column references, builds ExprNodeDescs for each
 * projection, converts them to RexNodes and wraps them in a HiveProjectRel.
 *
 * NOTE: there can only be one select clause since we don't handle multi
 * destination insert.
 *
 * @throws SemanticException if a select expression cannot be resolved
 */
private RelNode genSelectLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  boolean subQuery;
  ArrayList<ExprNodeDesc> col_list = new ArrayList<ExprNodeDesc>();

  // 1. Get Select Expression List
  QBParseInfo qbp = getQBParseInfo(qb);
  String selClauseName = qbp.getClauseNames().iterator().next();
  ASTNode selExprList = qbp.getSelForClause(selClauseName);

  // 2. Row resolvers for input and output
  RowResolver out_rwsch = new RowResolver();
  Integer pos = Integer.valueOf(0);
  RowResolver inputRR = this.m_relToHiveRR.get(srcRel);

  // 3. Query Hints
  // TODO: Handle Query Hints; currently we ignore them
  boolean selectStar = false;
  int posn = 0;
  boolean hintPresent = (selExprList.getChild(0).getType() == HiveParser.TOK_HINTLIST);
  if (hintPresent) {
    posn++;
  }

  // 4. Determine if select corresponds to a subquery
  subQuery = qb.getParseInfo().getIsSubQ();

  // 5. Bail out if select involves Transform (unsupported on the CBO path)
  boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() == HiveParser.TOK_TRANSFORM);
  if (isInTransform) {
    throw new RuntimeException("SELECT TRANSFORM not supported");
  }

  // 6. Bail out if select involves a UDTF (unsupported on the CBO path)
  ASTNode udtfExpr = (ASTNode) selExprList.getChild(posn).getChild(0);
  GenericUDTF genericUDTF = null;
  int udtfExprType = udtfExpr.getType();
  if (udtfExprType == HiveParser.TOK_FUNCTION
      || udtfExprType == HiveParser.TOK_FUNCTIONSTAR) {
    String funcName = TypeCheckProcFactory.DefaultExprProcessor
        .getFunctionText(udtfExpr, true);
    FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);
    if (fi != null) {
      genericUDTF = fi.getGenericUDTF();
    }
    if (genericUDTF != null) {
      throw new RuntimeException("SELECT UDTF not supported");
    }
  }

  // 7. Iterate over all expressions (after SELECT)
  ASTNode exprList = selExprList;
  int startPosn = posn;
  List<String> tabAliasesForAllProjs = getTabAliases(inputRR);
  for (int i = startPosn; i < exprList.getChildCount(); ++i) {
    // 7.1 child can be EXPR AS ALIAS, or EXPR.
    ASTNode child = (ASTNode) exprList.getChild(i);
    boolean hasAsClause = (!isInTransform) && (child.getChildCount() == 2);

    // 7.2 bail out if it is a windowing spec
    boolean isWindowSpec = child.getChildCount() == 3 ? (child.getChild(2)
        .getType() == HiveParser.TOK_WINDOWSPEC) : false;
    if (isWindowSpec) {
      throw new RuntimeException("Windowing is not supported yet");
    }

    // 7.3 EXPR AS (ALIAS,...) parses, but is only allowed for UDTF's
    // This check is not needed and invalid when there is a transform b/c
    // the AST's are slightly different.
    if (child.getChildCount() > 2) {
      throw new SemanticException(generateErrorMessage(
          (ASTNode) child.getChild(2), ErrorMsg.INVALID_AS.getMsg()));
    }

    ASTNode expr;
    String tabAlias;
    String colAlias;

    // 7.4 Get rid of TOK_SELEXPR
    expr = (ASTNode) child.getChild(0);
    String[] colRef = getColAlias(child, autogenColAliasPrfxLbl, inputRR,
        autogenColAliasPrfxIncludeFuncName, i);
    tabAlias = colRef[0];
    colAlias = colRef[1];

    // 7.5 Build ExprNode corresponding to columns
    if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
      pos = genColListRegex(".*", expr.getChildCount() == 0 ? null
          : getUnescapedName((ASTNode) expr.getChild(0)).toLowerCase(),
          expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
          subQuery);
      selectStar = true;
    } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL
        && !hasAsClause && !inputRR.getIsExprResolver()
        && isRegex(unescapeIdentifier(expr.getChild(0).getText()), conf)) {
      // In case the expression is a regex COL.
      // This can only happen without AS clause
      // We don't allow this for ExprResolver - the Group By case
      pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
          null, expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
          subQuery);
    } else if (expr.getType() == HiveParser.DOT
        && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
        && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
            .getChild(0).getText().toLowerCase())) && !hasAsClause
        && !inputRR.getIsExprResolver()
        && isRegex(unescapeIdentifier(expr.getChild(1).getText()), conf)) {
      // In case the expression is TABLE.COL (col can be regex).
      // This can only happen without AS clause
      // We don't allow this for ExprResolver - the Group By case
      pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
          unescapeIdentifier(expr.getChild(0).getChild(0).getText()
              .toLowerCase()), expr, col_list, inputRR, pos, out_rwsch,
          tabAliasesForAllProjs, subQuery);
    } else {
      // Case when this is an expression
      TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
      // We allow stateful functions in the SELECT list (but nowhere else)
      tcCtx.setAllowStatefulFunctions(true);
      ExprNodeDesc exp = genExprNodeDesc(expr, inputRR, tcCtx);
      String recommended = recommendName(exp, colAlias);
      if (recommended != null && out_rwsch.get(null, recommended) == null) {
        colAlias = recommended;
      }
      col_list.add(exp);
      if (subQuery) {
        out_rwsch.checkColumn(tabAlias, colAlias);
      }
      ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
          exp.getWritableObjectInspector(), tabAlias, false);
      colInfo
          .setSkewedCol((exp instanceof ExprNodeColumnDesc) ? ((ExprNodeColumnDesc) exp)
              .isSkewedCol() : false);
      out_rwsch.put(tabAlias, colAlias, colInfo);
      if (exp instanceof ExprNodeColumnDesc) {
        // Register alternate (table-qualified) mappings for plain column refs
        ExprNodeColumnDesc colExp = (ExprNodeColumnDesc) exp;
        String[] altMapping = inputRR.getAlternateMappings(colExp
            .getColumn());
        if (altMapping != null) {
          out_rwsch.put(altMapping[0], altMapping[1], colInfo);
        }
      }
      pos = Integer.valueOf(pos.intValue() + 1);
    }
  }
  // NOTE: selectStar is tracked but not consumed on the CBO path yet.
  selectStar = selectStar && exprList.getChildCount() == posn + 1;

  // 8. Replace NULL with CAST(NULL AS STRING)
  ArrayList<String> columnNames = new ArrayList<String>();
  for (int i = 0; i < col_list.size(); i++) {
    if (col_list.get(i) instanceof ExprNodeNullDesc) {
      col_list.set(i, new ExprNodeConstantDesc(
          TypeInfoFactory.stringTypeInfo, null));
    }
    columnNames.add(getColumnInternalName(i));
  }

  // 9. Convert Hive projections to Optiq RexNodes
  List<RexNode> optiqColLst = new ArrayList<RexNode>();
  RexNodeConverter rexNodeConv = new RexNodeConverter(m_cluster,
      srcRel.getRowType(),
      buildHiveColNameToInputPosMap(col_list, inputRR), 0, false);
  for (ExprNodeDesc colExpr : col_list) {
    optiqColLst.add(rexNodeConv.convert(colExpr));
  }

  // 10. Construct Hive Project Rel
  // 10.1. Prepend column names with '_o_'
  /*
   * Hive treats names that start with '_c' as internalNames; so change the
   * names so we don't run into this issue when converting back to Hive AST.
   */
  List<String> oFieldNames = Lists.transform(columnNames,
      new Function<String, String>() {
        public String apply(String hName) {
          return "_o_" + hName;
        }
      });

  // 10.2 Build Optiq Rel Node for project using converted projections & col
  // names
  HiveRel selRel = HiveProjectRel.create(srcRel, optiqColLst, oFieldNames);

  // 11. Keep track of colname-to-posmap && RR for new select
  this.m_relToHiveColNameOptiqPosMap.put(selRel,
      buildHiveToOptiqColumnMap(out_rwsch, selRel));
  this.m_relToHiveRR.put(selRel, out_rwsch);

  return selRel;
}
/**
 * Generates the logical plan for a query-block expression: a plain query
 * block is translated directly, a UNION translates both operands and then
 * combines them; any other opcode yields null.
 */
private RelNode genLogicalPlan(QBExpr qbexpr) throws SemanticException {
  switch (qbexpr.getOpcode()) {
  case NULLOP:
    return genLogicalPlan(qbexpr.getQB());
  case UNION: {
    RelNode leftRel = genLogicalPlan(qbexpr.getQBExpr1());
    RelNode rightRel = genLogicalPlan(qbexpr.getQBExpr2());
    return genUnionLogicalPlan(qbexpr.getAlias(),
        qbexpr.getQBExpr1().getAlias(), leftRel,
        qbexpr.getQBExpr2().getAlias(), rightRel);
  }
  default:
    return null;
  }
}
/**
 * Generates the Optiq logical plan (RelNode tree) for one query block.
 * The plan is assembled bottom-up: FROM sources (subqueries, table scans,
 * joins), then WHERE, GROUP BY, HAVING, SELECT, ORDER BY and LIMIT.
 * The individual gen*LogicalPlan helpers register RowResolver and
 * column-position maps for the rels they create.
 *
 * Fix: removed the unused locals {@code havingRel} and {@code rootRel}.
 *
 * @param qb the query block to translate
 * @return the root RelNode of the plan for this query block
 */
private RelNode genLogicalPlan(QB qb) throws SemanticException {
  RelNode srcRel = null;
  RelNode filterRel = null;
  RelNode gbRel = null;
  RelNode gbHavingRel = null;
  RelNode selectRel = null;
  RelNode obRel = null;
  RelNode limitRel = null;

  // First generate all the opInfos for the elements in the from clause
  Map<String, RelNode> aliasToRel = new HashMap<String, RelNode>();

  // 1. Build Rel For Src (SubQuery, TS, Join)
  // 1.1. Recurse over the subqueries to fill the subquery part of the plan
  for (String subqAlias : qb.getSubqAliases()) {
    QBExpr qbexpr = qb.getSubqForAlias(subqAlias);
    aliasToRel.put(subqAlias, genLogicalPlan(qbexpr));
    qbexpr.setAlias(subqAlias);
  }

  // 1.2 Recurse over all the source tables
  for (String tableAlias : qb.getTabAliases()) {
    RelNode op = genTableLogicalPlan(tableAlias, qb);
    aliasToRel.put(tableAlias, op);
  }

  // 1.3 process join
  if (qb.getParseInfo().getJoinExpr() != null) {
    srcRel = genJoinLogicalPlan(qb.getParseInfo().getJoinExpr(), aliasToRel);
  } else {
    // If no join then there should only be either 1 TS or 1 SubQuery
    srcRel = aliasToRel.values().iterator().next();
  }

  // 2. Build Rel for where Clause
  filterRel = genFilterLogicalPlan(qb, srcRel);
  srcRel = (filterRel == null) ? srcRel : filterRel;

  // 3. Build Rel for GB Clause
  gbRel = genGBLogicalPlan(qb, srcRel);
  srcRel = (gbRel == null) ? srcRel : gbRel;

  // 4. Build Rel for GB Having Clause
  gbHavingRel = genGBHavingLogicalPlan(qb, srcRel);
  srcRel = (gbHavingRel == null) ? srcRel : gbHavingRel;

  // 5. Build Rel for Select Clause
  selectRel = genSelectLogicalPlan(qb, srcRel);
  srcRel = (selectRel == null) ? srcRel : selectRel;

  // 6. In case this QB corresponds to a subquery, rewrite its RR so every
  // column is reachable through the subquery alias.
  // TODO: cleanup this
  if (qb.getParseInfo().getAlias() != null) {
    RowResolver rr = this.m_relToHiveRR.get(srcRel);
    RowResolver newRR = new RowResolver();
    String alias = qb.getParseInfo().getAlias();
    for (ColumnInfo colInfo : rr.getColumnInfos()) {
      String name = colInfo.getInternalName();
      String[] tmp = rr.reverseLookup(name);
      if ("".equals(tmp[0]) || tmp[1] == null) {
        // ast expression is not a valid column name for table
        tmp[1] = colInfo.getInternalName();
      }
      ColumnInfo newCi = new ColumnInfo(colInfo);
      newCi.setTabAlias(alias);
      newRR.put(alias, tmp[1], newCi);
    }
    m_relToHiveRR.put(srcRel, newRR);
    m_relToHiveColNameOptiqPosMap.put(srcRel,
        buildHiveToOptiqColumnMap(newRR, srcRel));
  }

  // 7. Build Rel for OB Clause
  obRel = genOBLogicalPlan(qb, srcRel);
  srcRel = (obRel == null) ? srcRel : obRel;

  // 8. Build Rel for Limit Clause
  limitRel = genLimitLogicalPlan(qb, srcRel);
  srcRel = (limitRel == null) ? srcRel : limitRel;

  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Plan for Query Block " + qb.getId());
  }
  return srcRel;
}
/**
 * Builds a filter rel for the HAVING clause on top of the group-by rel,
 * or returns null when the query block has no HAVING clause.
 */
private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  QBParseInfo parseInfo = getQBParseInfo(qb);
  String clauseName = parseInfo.getClauseNames().iterator().next();
  ASTNode havingClause = parseInfo.getHavingForClause(clauseName);
  if (havingClause == null) {
    return null;
  }
  // The HAVING predicate is the first child of the TOK_HAVING node.
  return genFilterRelNode((ASTNode) havingClause.getChild(0), srcRel);
}
/**
 * Builds an immutable map from each Hive internal column name in the given
 * RowResolver to its position in the resolver's row schema.
 *
 * Fix: removed the unused local counter {@code int i} (it was declared but
 * never incremented or read).
 *
 * NOTE(review): the {@code rNode} parameter is currently unused; it is kept
 * so existing call sites remain unchanged.
 */
private ImmutableMap<String, Integer> buildHiveToOptiqColumnMap(
    RowResolver rr, RelNode rNode) {
  ImmutableMap.Builder<String, Integer> b = new ImmutableMap.Builder<String, Integer>();
  for (ColumnInfo ci : rr.getRowSchema().getSignature()) {
    b.put(ci.getInternalName(), rr.getPosition(ci.getInternalName()));
  }
  return b.build();
}
/**
 * Maps every Hive column name referenced by the projection expressions to
 * that column's position in the input row resolver.
 */
private ImmutableMap<String, Integer> buildHiveColNameToInputPosMap(
    List<ExprNodeDesc> col_list, RowResolver inputRR) {
  // Collect every ExprNodeColumnDesc reachable from the projection list,
  // de-duplicated by hash code.
  Map<Integer, ExprNodeDesc> columnDescsByHash = new HashMap<Integer, ExprNodeDesc>();
  ExprNodeDescUtils.getExprNodeColumnDesc(col_list, columnDescsByHash);

  // Resolve each referenced column name to its input position.
  ImmutableMap.Builder<String, Integer> posMapBuilder = new ImmutableMap.Builder<String, Integer>();
  for (ExprNodeDesc desc : columnDescsByHash.values()) {
    String colName = ((ExprNodeColumnDesc) desc).getColumn();
    posMapBuilder.put(colName, inputRR.getPosition(colName));
  }
  return posMapBuilder.build();
}
/**
 * Returns the parse info for the query block; bails out when the query has
 * more than one destination clause (multi insert), which the CBO path does
 * not handle.
 */
private QBParseInfo getQBParseInfo(QB qb) {
  QBParseInfo parseInfo = qb.getParseInfo();
  if (parseInfo.getClauseNames().size() > 1) {
    throw new RuntimeException("Multi Insert is not supported");
  }
  return parseInfo;
}
/**
 * Collects the table alias of every column in the row resolver, in schema
 * order (one entry per column, duplicates included).
 */
private List<String> getTabAliases(RowResolver inputRR) {
  List<String> aliases = new ArrayList<String>();
  for (ColumnInfo columnInfo : inputRR.getColumnInfos()) {
    aliases.add(columnInfo.getTabAlias());
  }
  return aliases;
}
}
}
| ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
import java.io.IOException;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import net.hydromatic.optiq.SchemaPlus;
import net.hydromatic.optiq.tools.Frameworks;
import org.antlr.runtime.tree.Tree;
import org.antlr.runtime.tree.TreeWizard;
import org.antlr.runtime.tree.TreeWizard.ContextVisitor;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.common.StatsSetupConst;
import org.apache.hadoop.hive.common.StatsSetupConst.StatDB;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryProperties;
import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FetchTask;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.RecordReader;
import org.apache.hadoop.hive.ql.exec.RecordWriter;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.io.NullRowsInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
import org.apache.hadoop.hive.ql.lib.Dispatcher;
import org.apache.hadoop.hive.ql.lib.GraphWalker;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.optimizer.Optimizer;
import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
import org.apache.hadoop.hive.ql.optimizer.optiq.HiveDefaultRelMetadataProvider;
import org.apache.hadoop.hive.ql.optimizer.optiq.Pair;
import org.apache.hadoop.hive.ql.optimizer.optiq.RelOptHiveTable;
import org.apache.hadoop.hive.ql.optimizer.optiq.cost.HiveVolcanoPlanner;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveAggregateRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveFilterRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveJoinRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveProjectRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveSortRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.reloperators.HiveTableScanRel;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HiveMergeProjectRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HivePullUpProjectsAboveJoinRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HivePushJoinThroughJoinRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.rules.HiveSwapJoinRule;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.ASTConverter;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.RexNodeConverter;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.SqlFunctionConverter;
import org.apache.hadoop.hive.ql.optimizer.optiq.translator.TypeConverter;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec.SpecType;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderExpression;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFInputSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PTFQueryInputType;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionExpression;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionedTableFunctionSpec;
import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitioningSpec;
import org.apache.hadoop.hive.ql.parse.QBSubQuery.SubQueryType;
import org.apache.hadoop.hive.ql.parse.SubQueryUtils.ISubQueryJoinInfo;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.CurrentRowSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.Direction;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.RangeBoundarySpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.ValueBoundarySpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFrameSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec;
import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec;
import org.apache.hadoop.hive.ql.plan.AggregationDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.plan.CreateTableLikeDesc;
import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnListDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
import org.apache.hadoop.hive.ql.plan.ExtractDesc;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc;
import org.apache.hadoop.hive.ql.plan.FilterDesc.sampleDesc;
import org.apache.hadoop.hive.ql.plan.ForwardDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
import org.apache.hadoop.hive.ql.plan.JoinDesc;
import org.apache.hadoop.hive.ql.plan.LateralViewForwardDesc;
import org.apache.hadoop.hive.ql.plan.LateralViewJoinDesc;
import org.apache.hadoop.hive.ql.plan.LimitDesc;
import org.apache.hadoop.hive.ql.plan.ListBucketingCtx;
import org.apache.hadoop.hive.ql.plan.LoadFileDesc;
import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PTFDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
import org.apache.hadoop.hive.ql.plan.ScriptDesc;
import org.apache.hadoop.hive.ql.plan.SelectDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.ql.plan.UDTFDesc;
import org.apache.hadoop.hive.ql.plan.UnionDesc;
import org.apache.hadoop.hive.ql.plan.ptf.OrderExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PTFExpressionDef;
import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
import org.apache.hadoop.hive.ql.stats.StatsFactory;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFHash;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
import org.apache.hadoop.hive.serde2.NullStructSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.InputFormat;
import org.eigenbase.rel.AggregateCall;
import org.eigenbase.rel.Aggregation;
import org.eigenbase.rel.InvalidRelException;
import org.eigenbase.rel.JoinRelType;
import org.eigenbase.rel.RelCollation;
import org.eigenbase.rel.RelCollationImpl;
import org.eigenbase.rel.RelFieldCollation;
import org.eigenbase.rel.RelNode;
import org.eigenbase.rel.metadata.CachingRelMetadataProvider;
import org.eigenbase.rel.metadata.ChainedRelMetadataProvider;
import org.eigenbase.rel.metadata.RelMetadataProvider;
import org.eigenbase.relopt.RelOptCluster;
import org.eigenbase.relopt.RelOptPlanner;
import org.eigenbase.relopt.RelOptQuery;
import org.eigenbase.relopt.RelOptSchema;
import org.eigenbase.relopt.RelTraitSet;
import org.eigenbase.reltype.RelDataType;
import org.eigenbase.reltype.RelDataTypeField;
import org.eigenbase.rex.RexBuilder;
import org.eigenbase.rex.RexInputRef;
import org.eigenbase.rex.RexNode;
import org.eigenbase.sql.fun.SqlStdOperatorTable;
import org.eigenbase.util.CompositeList;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
/**
* Implementation of the semantic analyzer. It generates the query plan.
* There are other specific semantic analyzers for some hive operations such as
* DDLSemanticAnalyzer for ddl operations.
*/
public class SemanticAnalyzer extends BaseSemanticAnalyzer {
public static final String DUMMY_DATABASE = "_dummy_database";
public static final String DUMMY_TABLE = "_dummy_table";
private HashMap<TableScanOperator, ExprNodeDesc> opToPartPruner;
private HashMap<TableScanOperator, PrunedPartitionList> opToPartList;
private HashMap<String, Operator<? extends OperatorDesc>> topOps;
private HashMap<String, Operator<? extends OperatorDesc>> topSelOps;
private LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext> opParseCtx;
private List<LoadTableDesc> loadTableWork;
private List<LoadFileDesc> loadFileWork;
private Map<JoinOperator, QBJoinTree> joinContext;
private Map<SMBMapJoinOperator, QBJoinTree> smbMapJoinContext;
private final HashMap<TableScanOperator, Table> topToTable;
private final Map<FileSinkOperator, Table> fsopToTable;
private final List<ReduceSinkOperator> reduceSinkOperatorsAddedByEnforceBucketingSorting;
private final HashMap<TableScanOperator, Map<String, String>> topToTableProps;
private QB qb;
private ASTNode ast;
private int destTableId;
private UnionProcContext uCtx;
List<AbstractMapJoinOperator<? extends MapJoinDesc>> listMapJoinOpsNoReducer;
private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
private final Map<TableScanOperator, Map<String, ExprNodeDesc>> opToPartToSkewedPruner;
/**
* a map for the split sampling, from alias to an instance of SplitSample
* that describes percentage and number.
*/
private final HashMap<String, SplitSample> nameToSplitSample;
Map<GroupByOperator, Set<String>> groupOpToInputTables;
Map<String, PrunedPartitionList> prunedPartitions;
private List<FieldSchema> resultSchema;
private CreateViewDesc createVwDesc;
private ArrayList<String> viewsExpanded;
private ASTNode viewSelect;
private final UnparseTranslator unparseTranslator;
private final GlobalLimitCtx globalLimitCtx = new GlobalLimitCtx();
// prefix for column names auto generated by hive
private final String autogenColAliasPrfxLbl;
private final boolean autogenColAliasPrfxIncludeFuncName;
// Keep track of view alias to read entity corresponding to the view
// For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
// keeps track of aliases for V3, V3:V2, V3:V2:V1.
// This is used when T is added as an input for the query, the parents of T is
// derived from the alias V3:V2:V1:T
private final Map<String, ReadEntity> viewAliasToInput = new HashMap<String, ReadEntity>();
// Max characters when auto generating the column name with func name
private static final int AUTOGEN_COLALIAS_PRFX_MAXLENGTH = 20;
// flag for no scan during analyze ... compute statistics
protected boolean noscan = false;
//flag for partial scan during analyze ... compute statistics
protected boolean partialscan = false;
private volatile boolean runCBO = true;
private volatile boolean disableJoinMerge = false;
/*
* Capture the CTE definitions in a Query.
*/
private final Map<String, ASTNode> aliasToCTEs;
/*
* Used to check recursive CTE invocations. Similar to viewsExpanded
*/
private ArrayList<String> ctesExpanded;
/**
 * Mutable state threaded through phase 1 of semantic analysis.
 */
private static class Phase1Ctx {
String dest; // name of the current destination (insert) clause
int nextNum; // presumably a running counter for generated names/ids in phase 1 — confirm at call sites
}
/**
 * Creates an analyzer with an explicit cost-based-optimizer toggle.
 *
 * @param conf   Hive configuration
 * @param runCBO whether the Optiq-based cost based optimizer should run
 * @throws SemanticException if base initialization fails
 */
protected SemanticAnalyzer(HiveConf conf, boolean runCBO) throws SemanticException {
this(conf);
this.runCBO = runCBO;
}
/**
 * Creates an analyzer with CBO enabled (the default of {@code runCBO}).
 * Initializes all per-query bookkeeping maps/lists and reads the
 * auto-generated column alias settings from the configuration.
 *
 * @param conf Hive configuration
 * @throws SemanticException if base initialization fails
 */
public SemanticAnalyzer(HiveConf conf) throws SemanticException {
super(conf);
opToPartPruner = new HashMap<TableScanOperator, ExprNodeDesc>();
opToPartList = new HashMap<TableScanOperator, PrunedPartitionList>();
opToSamplePruner = new HashMap<TableScanOperator, sampleDesc>();
nameToSplitSample = new HashMap<String, SplitSample>();
topOps = new HashMap<String, Operator<? extends OperatorDesc>>();
topSelOps = new HashMap<String, Operator<? extends OperatorDesc>>();
loadTableWork = new ArrayList<LoadTableDesc>();
loadFileWork = new ArrayList<LoadFileDesc>();
opParseCtx = new LinkedHashMap<Operator<? extends OperatorDesc>, OpParseContext>();
joinContext = new HashMap<JoinOperator, QBJoinTree>();
smbMapJoinContext = new HashMap<SMBMapJoinOperator, QBJoinTree>();
topToTable = new HashMap<TableScanOperator, Table>();
fsopToTable = new HashMap<FileSinkOperator, Table>();
reduceSinkOperatorsAddedByEnforceBucketingSorting = new ArrayList<ReduceSinkOperator>();
topToTableProps = new HashMap<TableScanOperator, Map<String, String>>();
destTableId = 1;
uCtx = null;
listMapJoinOpsNoReducer = new ArrayList<AbstractMapJoinOperator<? extends MapJoinDesc>>();
groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
prunedPartitions = new HashMap<String, PrunedPartitionList>();
unparseTranslator = new UnparseTranslator(conf);
// Prefix (and whether to embed the function name) for auto-generated
// column aliases, e.g. "_c0".
autogenColAliasPrfxLbl = HiveConf.getVar(conf,
HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL);
autogenColAliasPrfxIncludeFuncName = HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_INCLUDEFUNCNAME);
queryProperties = new QueryProperties();
opToPartToSkewedPruner = new HashMap<TableScanOperator, Map<String, ExprNodeDesc>>();
aliasToCTEs = new HashMap<String, ASTNode>();
}
  /**
   * Clears all per-query state so this analyzer instance can be reused for
   * another compilation. Mirrors the structures initialized in the
   * constructor; scalar fields are reset to their initial values.
   */
  @Override
  protected void reset() {
    super.reset();
    loadTableWork.clear();
    loadFileWork.clear();
    topOps.clear();
    topSelOps.clear();
    destTableId = 1;
    idToTableNameMap.clear();
    qb = null;
    ast = null;
    uCtx = null;
    joinContext.clear();
    smbMapJoinContext.clear();
    opParseCtx.clear();
    groupOpToInputTables.clear();
    prunedPartitions.clear();
    disableJoinMerge = false;
    aliasToCTEs.clear();
    topToTable.clear();
    opToPartPruner.clear();
    opToPartList.clear();
    opToPartToSkewedPruner.clear();
    opToSamplePruner.clear();
    nameToSplitSample.clear();
    fsopToTable.clear();
    resultSchema = null;
    createVwDesc = null;
    // View/CTE expansion stacks are lazily created per query; drop them.
    viewsExpanded = null;
    viewSelect = null;
    ctesExpanded = null;
    globalLimitCtx.disableOpt();
    viewAliasToInput.clear();
    reduceSinkOperatorsAddedByEnforceBucketingSorting.clear();
    topToTableProps.clear();
    listMapJoinOpsNoReducer.clear();
    unparseTranslator.clear();
    queryProperties.clear();
    outputs.clear();
  }
  /**
   * Re-initializes this analyzer's state from an existing {@link ParseContext},
   * e.g. when compilation is re-driven from a previously built plan context.
   * The fields adopted here are shared with (not copied from) the context.
   */
  public void initParseCtx(ParseContext pctx) {
    opToPartPruner = pctx.getOpToPartPruner();
    opToPartList = pctx.getOpToPartList();
    opToSamplePruner = pctx.getOpToSamplePruner();
    topOps = pctx.getTopOps();
    topSelOps = pctx.getTopSelOps();
    opParseCtx = pctx.getOpParseCtx();
    loadTableWork = pctx.getLoadTableWork();
    loadFileWork = pctx.getLoadFileWork();
    joinContext = pctx.getJoinContext();
    smbMapJoinContext = pctx.getSmbMapJoinContext();
    ctx = pctx.getContext();
    destTableId = pctx.getDestTableId();
    idToTableNameMap = pctx.getIdToTableNameMap();
    uCtx = pctx.getUCtx();
    listMapJoinOpsNoReducer = pctx.getListMapJoinOpsNoReducer();
    qb = pctx.getQB();
    groupOpToInputTables = pctx.getGroupOpToInputTables();
    prunedPartitions = pctx.getPrunedPartitions();
    fetchTask = pctx.getFetchTask();
    setLineageInfo(pctx.getLineageInfo());
  }
  /**
   * Snapshots the analyzer's current state into a new {@link ParseContext}
   * for the optimizer and downstream compilation stages.
   *
   * @return a ParseContext referencing this analyzer's live data structures
   */
  public ParseContext getParseContext() {
    return new ParseContext(conf, qb, ast, opToPartPruner, opToPartList, topOps,
        topSelOps, opParseCtx, joinContext, smbMapJoinContext, topToTable, topToTableProps,
        fsopToTable, loadTableWork,
        loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
        listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
        opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
        opToPartToSkewedPruner, viewAliasToInput,
        reduceSinkOperatorsAddedByEnforceBucketingSorting,
        queryProperties);
  }
@SuppressWarnings("nls")
public void doPhase1QBExpr(ASTNode ast, QBExpr qbexpr, String id, String alias)
throws SemanticException {
assert (ast.getToken() != null);
switch (ast.getToken().getType()) {
case HiveParser.TOK_QUERY: {
QB qb = new QB(id, alias, true);
Phase1Ctx ctx_1 = initPhase1Ctx();
doPhase1(ast, qb, ctx_1);
qbexpr.setOpcode(QBExpr.Opcode.NULLOP);
qbexpr.setQB(qb);
}
break;
case HiveParser.TOK_UNION: {
qbexpr.setOpcode(QBExpr.Opcode.UNION);
// query 1
assert (ast.getChild(0) != null);
QBExpr qbexpr1 = new QBExpr(alias + "-subquery1");
doPhase1QBExpr((ASTNode) ast.getChild(0), qbexpr1, id + "-subquery1",
alias + "-subquery1");
qbexpr.setQBExpr1(qbexpr1);
// query 2
assert (ast.getChild(0) != null);
QBExpr qbexpr2 = new QBExpr(alias + "-subquery2");
doPhase1QBExpr((ASTNode) ast.getChild(1), qbexpr2, id + "-subquery2",
alias + "-subquery2");
qbexpr.setQBExpr2(qbexpr2);
}
break;
}
}
  /**
   * Finds all aggregation invocations in a SELECT clause. Plain (non-windowed)
   * aggregations are returned keyed by their string tree form, which
   * de-duplicates identical invocations; window-function invocations are
   * registered on the QB's WindowingSpec for the destination instead.
   *
   * @param selExpr the TOK_SELECT / TOK_SELECTDI node
   * @param qb      enclosing query block
   * @param dest    destination clause name
   * @return map from toStringTree() of each aggregation to its AST
   * @throws SemanticException on malformed aggregation / window invocations
   */
  private LinkedHashMap<String, ASTNode> doPhase1GetAggregationsFromSelect(
      ASTNode selExpr, QB qb, String dest) throws SemanticException {

    // Iterate over the selects search for aggregation Trees.
    // Use String as keys to eliminate duplicate trees.
    LinkedHashMap<String, ASTNode> aggregationTrees = new LinkedHashMap<String, ASTNode>();
    List<ASTNode> wdwFns = new ArrayList<ASTNode>();
    for (int i = 0; i < selExpr.getChildCount(); ++i) {
      ASTNode function = (ASTNode) selExpr.getChild(i).getChild(0);
      doPhase1GetAllAggregations(function, aggregationTrees, wdwFns);
    }

    // window based aggregations are handled differently
    for (ASTNode wdwFn : wdwFns) {
      WindowingSpec spec = qb.getWindowingSpec(dest);
      if(spec == null) {
        // First window function for this destination: create its spec.
        queryProperties.setHasWindowing(true);
        spec = new WindowingSpec();
        qb.addDestToWindowingSpec(dest, spec);
      }
      HashMap<String, ASTNode> wExprsInDest = qb.getParseInfo().getWindowingExprsForClause(dest);
      int wColIdx = spec.getWindowExpressions() == null ? 0 : spec.getWindowExpressions().size();
      // The window spec is always the last child of the invocation.
      WindowFunctionSpec wFnSpec = processWindowFunction(wdwFn,
        (ASTNode)wdwFn.getChild(wdwFn.getChildCount()-1));

      // If this is a duplicate invocation of a function; don't add to WindowingSpec.
      if ( wExprsInDest != null &&
          wExprsInDest.containsKey(wFnSpec.getExpression().toStringTree())) {
        continue;
      }
      // Synthesized column alias "_wcol<n>" for the window function output.
      wFnSpec.setAlias("_wcol" + wColIdx);
      spec.addWindowFunction(wFnSpec);
      qb.getParseInfo().addWindowingExprToClause(dest, wFnSpec.getExpression());
    }
    return aggregationTrees;
  }
private void doPhase1GetColumnAliasesFromSelect(
ASTNode selectExpr, QBParseInfo qbp) {
for (int i = 0; i < selectExpr.getChildCount(); ++i) {
ASTNode selExpr = (ASTNode) selectExpr.getChild(i);
if ((selExpr.getToken().getType() == HiveParser.TOK_SELEXPR)
&& (selExpr.getChildCount() == 2)) {
String columnAlias = unescapeIdentifier(selExpr.getChild(1).getText());
qbp.setExprToColumnAlias((ASTNode) selExpr.getChild(0), columnAlias);
}
}
}
  /**
   * DFS-scan the expressionTree to find all aggregation subtrees and put them
   * in aggregations.
   *
   * @param expressionTree expression AST to scan
   * @param aggregations
   *          the key to the HashTable is the toStringTree() representation of
   *          the aggregation subtree.
   * @param wdwFns collects window-function invocations (those whose last child
   *          is a TOK_WINDOWSPEC); the caller handles them separately
   * @throws SemanticException if a function that requires an OVER clause is
   *           used without one
   */
  private void doPhase1GetAllAggregations(ASTNode expressionTree,
      HashMap<String, ASTNode> aggregations, List<ASTNode> wdwFns) throws SemanticException {
    int exprTokenType = expressionTree.getToken().getType();
    if (exprTokenType == HiveParser.TOK_FUNCTION
        || exprTokenType == HiveParser.TOK_FUNCTIONDI
        || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
      assert (expressionTree.getChildCount() != 0);
      // A trailing TOK_WINDOWSPEC marks a window function, not a plain
      // aggregation; set it aside and do not recurse into it.
      if (expressionTree.getChild(expressionTree.getChildCount()-1).getType()
          == HiveParser.TOK_WINDOWSPEC) {
        wdwFns.add(expressionTree);
        return;
      }
      if (expressionTree.getChild(0).getType() == HiveParser.Identifier) {
        String functionName = unescapeIdentifier(expressionTree.getChild(0)
            .getText());
        // Ordering-dependent functions (e.g. rank) are only legal with OVER.
        if(FunctionRegistry.impliesOrder(functionName)) {
          throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
        }
        if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
          if(containsLeadLagUDF(expressionTree)) {
            throw new SemanticException(ErrorMsg.MISSING_OVER_CLAUSE.getMsg(functionName));
          }
          aggregations.put(expressionTree.toStringTree(), expressionTree);
          FunctionInfo fi = FunctionRegistry.getFunctionInfo(functionName);
          if (!fi.isNative()) {
            unparseTranslator.addIdentifierTranslation((ASTNode) expressionTree
                .getChild(0));
          }
          // Found an aggregation; do not scan its arguments for nested ones.
          return;
        }
      }
    }
    for (int i = 0; i < expressionTree.getChildCount(); i++) {
      doPhase1GetAllAggregations((ASTNode) expressionTree.getChild(i),
          aggregations, wdwFns);
    }
  }
private List<ASTNode> doPhase1GetDistinctFuncExprs(
HashMap<String, ASTNode> aggregationTrees) throws SemanticException {
List<ASTNode> exprs = new ArrayList<ASTNode>();
for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
ASTNode value = entry.getValue();
assert (value != null);
if (value.getToken().getType() == HiveParser.TOK_FUNCTIONDI) {
exprs.add(value);
}
}
return exprs;
}
public static String generateErrorMessage(ASTNode ast, String message) {
StringBuilder sb = new StringBuilder();
sb.append(ast.getLine());
sb.append(":");
sb.append(ast.getCharPositionInLine());
sb.append(" ");
sb.append(message);
sb.append(". Error encountered near token '");
sb.append(ErrorMsg.getText(ast));
sb.append("'");
return sb.toString();
}
  /**
   * Goes though the tabref tree and finds the alias for the table. Once found,
   * it records the table name-> alias association in aliasToTabs. It also makes
   * an association from the alias to the table AST in parse info.
   *
   * Also processes the optional children of the table reference: table
   * properties, bucket sampling (TABLESAMPLE) and split sampling
   * (percent / row count / total length).
   *
   * @return the alias of the table
   * @throws SemanticException if the alias is already in use or a sampling
   *           restriction is violated
   */
  private String processTable(QB qb, ASTNode tabref) throws SemanticException {
    // For each table reference get the table name
    // and the alias (if alias is not present, the table name
    // is used as an alias)
    int aliasIndex = 0;
    int propsIndex = -1;
    int tsampleIndex = -1;
    int ssampleIndex = -1;
    // Classify the optional children that follow the table name (child 0).
    for (int index = 1; index < tabref.getChildCount(); index++) {
      ASTNode ct = (ASTNode) tabref.getChild(index);
      if (ct.getToken().getType() == HiveParser.TOK_TABLEBUCKETSAMPLE) {
        tsampleIndex = index;
      } else if (ct.getToken().getType() == HiveParser.TOK_TABLESPLITSAMPLE) {
        ssampleIndex = index;
      } else if (ct.getToken().getType() == HiveParser.TOK_TABLEPROPERTIES) {
        propsIndex = index;
      } else {
        aliasIndex = index;
      }
    }

    ASTNode tableTree = (ASTNode) (tabref.getChild(0));

    String tabIdName = getUnescapedName(tableTree);

    String alias;
    if (aliasIndex != 0) {
      alias = unescapeIdentifier(tabref.getChild(aliasIndex).getText());
    }
    else {
      // No explicit alias: use the unqualified table name.
      alias = getUnescapedUnqualifiedTableName(tableTree);
    }

    if (propsIndex >= 0) {
      Tree propsAST = tabref.getChild(propsIndex);
      Map<String, String> props = DDLSemanticAnalyzer.getProps((ASTNode) propsAST.getChild(0));
      qb.setTabProps(alias, props);
    }

    // If the alias is already there then we have a conflict
    if (qb.exists(alias)) {
      throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(tabref
          .getChild(aliasIndex)));
    }
    if (tsampleIndex >= 0) {
      // Bucket sampling: TABLESAMPLE(BUCKET x OUT OF y [ON cols]).
      ASTNode sampleClause = (ASTNode) tabref.getChild(tsampleIndex);
      ArrayList<ASTNode> sampleCols = new ArrayList<ASTNode>();
      if (sampleClause.getChildCount() > 2) {
        for (int i = 2; i < sampleClause.getChildCount(); i++) {
          sampleCols.add((ASTNode) sampleClause.getChild(i));
        }
      }
      // TODO: For now only support sampling on up to two columns
      // Need to change it to list of columns
      if (sampleCols.size() > 2) {
        throw new SemanticException(generateErrorMessage(
            (ASTNode) tabref.getChild(0),
            ErrorMsg.SAMPLE_RESTRICTION.getMsg()));
      }
      qb.getParseInfo().setTabSample(
          alias,
          new TableSample(
              unescapeIdentifier(sampleClause.getChild(0).getText()),
              unescapeIdentifier(sampleClause.getChild(1).getText()),
              sampleCols));
      if (unparseTranslator.isEnabled()) {
        for (ASTNode sampleCol : sampleCols) {
          unparseTranslator.addIdentifierTranslation((ASTNode) sampleCol
              .getChild(0));
        }
      }
    } else if (ssampleIndex >= 0) {
      // Split sampling: percentage, row count or total-length based.
      ASTNode sampleClause = (ASTNode) tabref.getChild(ssampleIndex);

      Tree type = sampleClause.getChild(0);
      Tree numerator = sampleClause.getChild(1);
      String value = unescapeIdentifier(numerator.getText());


      SplitSample sample;
      if (type.getType() == HiveParser.TOK_PERCENT) {
        assertCombineInputFormat(numerator, "Percentage");
        Double percent = Double.valueOf(value).doubleValue();
        if (percent < 0 || percent > 100) {
          throw new SemanticException(generateErrorMessage((ASTNode) numerator,
              "Sampling percentage should be between 0 and 100"));
        }
        int seedNum = conf.getIntVar(ConfVars.HIVESAMPLERANDOMNUM);
        sample = new SplitSample(percent, seedNum);
      } else if (type.getType() == HiveParser.TOK_ROWCOUNT) {
        sample = new SplitSample(Integer.valueOf(value));
      } else {
        assert type.getType() == HiveParser.TOK_LENGTH;
        assertCombineInputFormat(numerator, "Total Length");
        // The value ends in a size suffix (k/m/g); shift to scale to bytes.
        long length = Integer.valueOf(value.substring(0, value.length() - 1));
        char last = value.charAt(value.length() - 1);
        if (last == 'k' || last == 'K') {
          length <<= 10;
        } else if (last == 'm' || last == 'M') {
          length <<= 20;
        } else if (last == 'g' || last == 'G') {
          length <<= 30;
        }
        int seedNum = conf.getIntVar(ConfVars.HIVESAMPLERANDOMNUM);
        sample = new SplitSample(length, seedNum);
      }
      String alias_id = getAliasId(alias, qb);
      nameToSplitSample.put(alias_id, sample);
    }
    // Insert this map into the stats
    qb.setTabAlias(alias, tabIdName);
    qb.addAlias(alias);

    qb.getParseInfo().setSrcForAlias(alias, tableTree);

    unparseTranslator.addTableNameTranslation(tableTree, SessionState.get().getCurrentDatabase());
    if (aliasIndex != 0) {
      unparseTranslator.addIdentifierTranslation((ASTNode) tabref
          .getChild(aliasIndex));
    }

    return alias;
  }
private void assertCombineInputFormat(Tree numerator, String message) throws SemanticException {
String inputFormat = conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez") ?
HiveConf.getVar(conf, HiveConf.ConfVars.HIVETEZINPUTFORMAT):
HiveConf.getVar(conf, HiveConf.ConfVars.HIVEINPUTFORMAT);
if (!inputFormat.equals(CombineHiveInputFormat.class.getName())) {
throw new SemanticException(generateErrorMessage((ASTNode) numerator,
message + " sampling is not supported in " + inputFormat));
}
}
  /**
   * Registers a subquery (TOK_SUBQUERY) under its mandatory alias and runs
   * phase-1 analysis on its body.
   *
   * @return the subquery's alias
   * @throws SemanticException if the alias is missing or already in use
   */
  private String processSubQuery(QB qb, ASTNode subq) throws SemanticException {

    // This is a subquery and must have an alias
    if (subq.getChildCount() != 2) {
      throw new SemanticException(ErrorMsg.NO_SUBQUERY_ALIAS.getMsg(subq));
    }
    ASTNode subqref = (ASTNode) subq.getChild(0);
    String alias = unescapeIdentifier(subq.getChild(1).getText());

    // Recursively do the first phase of semantic analysis for the subquery
    QBExpr qbexpr = new QBExpr(alias);

    doPhase1QBExpr(subqref, qbexpr, qb.getId(), alias);

    // If the alias is already there then we have a conflict
    if (qb.exists(alias)) {
      throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(subq
          .getChild(1)));
    }
    // Insert this map into the stats
    qb.setSubqAlias(alias, qbexpr);
    qb.addAlias(alias);

    unparseTranslator.addIdentifierTranslation((ASTNode) subq.getChild(1));

    return alias;
  }
/*
* Phase1: hold onto any CTE definitions in aliasToCTE.
* CTE definitions are global to the Query.
*/
private void processCTE(QB qb, ASTNode ctes) throws SemanticException {
int numCTEs = ctes.getChildCount();
for(int i=0; i <numCTEs; i++) {
ASTNode cte = (ASTNode) ctes.getChild(i);
ASTNode cteQry = (ASTNode) cte.getChild(0);
String alias = unescapeIdentifier(cte.getChild(1).getText());
String qName = qb.getId() == null ? "" : qb.getId() + ":";
qName += alias.toLowerCase();
if ( aliasToCTEs.containsKey(qName)) {
throw new SemanticException(ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(cte.getChild(1)));
}
aliasToCTEs.put(qName, cteQry);
}
}
/*
* We allow CTE definitions in views. So we can end up with a hierarchy of CTE definitions:
* - at the top level of a query statement
* - where a view is referenced.
* - views may refer to other views.
*
* The scoping rules we use are: to search for a CTE from the current QB outwards. In order to
* disambiguate between CTES are different levels we qualify(prefix) them with the id of the QB
* they appear in when adding them to the <code>aliasToCTEs</code> map.
*
*/
private ASTNode findCTEFromName(QB qb, String cteName) {
/*
* When saving a view definition all table references in the AST are qualified; including CTE references.
* Where as CTE definitions have no DB qualifier; so we strip out the DB qualifier before searching in
* <code>aliasToCTEs</code> map.
*/
String currDB = SessionState.get().getCurrentDatabase();
if ( currDB != null && cteName.startsWith(currDB) &&
cteName.length() > currDB.length() &&
cteName.charAt(currDB.length()) == '.' ) {
cteName = cteName.substring(currDB.length() + 1);
}
StringBuffer qId = new StringBuffer();
if (qb.getId() != null) {
qId.append(qb.getId());
}
while (qId.length() > 0) {
String nm = qId + ":" + cteName;
if (aliasToCTEs.containsKey(nm)) {
return aliasToCTEs.get(nm);
}
int lastIndex = qId.lastIndexOf(":");
lastIndex = lastIndex < 0 ? 0 : lastIndex;
qId.setLength(lastIndex);
}
return aliasToCTEs.get(cteName);
}
  /*
   * If a CTE is referenced in a QueryBlock:
   * - add it as a SubQuery for now.
   *   - SQ.alias is the alias used in QB. (if no alias is specified,
   *     it used the CTE name. Works just like table references)
   *   - Adding SQ done by:
   *     - copying AST of CTE
   *     - setting ASTOrigin on cloned AST.
   *   - trigger phase 1 on new QBExpr.
   *   - update QB data structs: remove this as a table reference, move it to a SQ invocation.
   */
  private void addCTEAsSubQuery(QB qb, String cteName, String cteAlias) throws SemanticException {
    cteAlias = cteAlias == null ? cteName : cteAlias;
    ASTNode cteQryNode = findCTEFromName(qb, cteName);
    QBExpr cteQBExpr = new QBExpr(cteAlias);

    // Capture the original CTE text so errors inside the expanded tree can
    // be reported against the definition site.
    String cteText = ctx.getTokenRewriteStream().toString(
        cteQryNode.getTokenStartIndex(), cteQryNode.getTokenStopIndex());
    final ASTNodeOrigin cteOrigin = new ASTNodeOrigin("CTE", cteName,
        cteText, cteAlias, cteQryNode);
    // Work on a deep copy so each reference to the CTE expands independently.
    cteQryNode = (ASTNode) ParseDriver.adaptor.dupTree(cteQryNode);
    SubQueryUtils.setOriginDeep(cteQryNode, cteOrigin);

    doPhase1QBExpr(cteQryNode, cteQBExpr, qb.getId(), cteAlias);
    qb.rewriteCTEToSubq(cteAlias, cteName, cteQBExpr);
  }
private boolean isJoinToken(ASTNode node) {
if ((node.getToken().getType() == HiveParser.TOK_JOIN)
|| (node.getToken().getType() == HiveParser.TOK_CROSSJOIN)
|| (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
|| (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN)
|| (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN)
|| (node.getToken().getType() == HiveParser.TOK_LEFTSEMIJOIN)
|| (node.getToken().getType() == HiveParser.TOK_UNIQUEJOIN)) {
return true;
}
return false;
}
/**
* Given the AST with TOK_JOIN as the root, get all the aliases for the tables
* or subqueries in the join.
*
* @param qb
* @param join
* @throws SemanticException
*/
@SuppressWarnings("nls")
private void processJoin(QB qb, ASTNode join) throws SemanticException {
int numChildren = join.getChildCount();
if ((numChildren != 2) && (numChildren != 3)
&& join.getToken().getType() != HiveParser.TOK_UNIQUEJOIN) {
throw new SemanticException(generateErrorMessage(join,
"Join with multiple children"));
}
for (int num = 0; num < numChildren; num++) {
ASTNode child = (ASTNode) join.getChild(num);
if (child.getToken().getType() == HiveParser.TOK_TABREF) {
processTable(qb, child);
} else if (child.getToken().getType() == HiveParser.TOK_SUBQUERY) {
processSubQuery(qb, child);
} else if (child.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) {
queryProperties.setHasPTF(true);
processPTF(qb, child);
PTFInvocationSpec ptfInvocationSpec = qb.getPTFInvocationSpec(child);
String inputAlias = ptfInvocationSpec == null ? null :
ptfInvocationSpec.getFunction().getAlias();;
if ( inputAlias == null ) {
throw new SemanticException(generateErrorMessage(child,
"PTF invocation in a Join must have an alias"));
}
} else if (child.getToken().getType() == HiveParser.TOK_LATERAL_VIEW ||
child.getToken().getType() == HiveParser.TOK_LATERAL_VIEW_OUTER) {
// SELECT * FROM src1 LATERAL VIEW udtf() AS myTable JOIN src2 ...
// is not supported. Instead, the lateral view must be in a subquery
// SELECT * FROM (SELECT * FROM src1 LATERAL VIEW udtf() AS myTable) a
// JOIN src2 ...
throw new SemanticException(ErrorMsg.LATERAL_VIEW_WITH_JOIN
.getMsg(join));
} else if (isJoinToken(child)) {
processJoin(qb, child);
}
}
}
  /**
   * Given the AST with TOK_LATERAL_VIEW as the root, get the alias for the
   * table or subquery in the lateral view and also make a mapping from the
   * alias to all the lateral view AST's.
   *
   * Lateral views may be chained; nested TOK_LATERAL_VIEW nodes are handled
   * by recursion, and each level is registered under the same base alias.
   *
   * @param qb
   * @param lateralView
   * @return the alias for the table/subquery
   * @throws SemanticException if the lateral view's source child is invalid
   */
  private String processLateralView(QB qb, ASTNode lateralView)
      throws SemanticException {
    int numChildren = lateralView.getChildCount();

    assert (numChildren == 2);
    ASTNode next = (ASTNode) lateralView.getChild(1);

    String alias = null;

    switch (next.getToken().getType()) {
    case HiveParser.TOK_TABREF:
      alias = processTable(qb, next);
      break;
    case HiveParser.TOK_SUBQUERY:
      alias = processSubQuery(qb, next);
      break;
    case HiveParser.TOK_LATERAL_VIEW:
    case HiveParser.TOK_LATERAL_VIEW_OUTER:
      // Chained lateral view: recurse down to the underlying source.
      alias = processLateralView(qb, next);
      break;
    default:
      throw new SemanticException(ErrorMsg.LATERAL_VIEW_INVALID_CHILD
          .getMsg(lateralView));
    }
    alias = alias.toLowerCase();
    qb.getParseInfo().addLateralViewForAlias(alias, lateralView);
    qb.addAlias(alias);
    return alias;
  }
/**
* Phase 1: (including, but not limited to):
*
* 1. Gets all the aliases for all the tables / subqueries and makes the
* appropriate mapping in aliasToTabs, aliasToSubq 2. Gets the location of the
* destination and names the clause "inclause" + i 3. Creates a map from a
* string representation of an aggregation tree to the actual aggregation AST
* 4. Creates a mapping from the clause name to the select expression AST in
* destToSelExpr 5. Creates a mapping from a table alias to the lateral view
* AST's in aliasToLateralViews
*
* @param ast
* @param qb
* @param ctx_1
* @throws SemanticException
*/
@SuppressWarnings({"fallthrough", "nls"})
public boolean doPhase1(ASTNode ast, QB qb, Phase1Ctx ctx_1)
throws SemanticException {
boolean phase1Result = true;
QBParseInfo qbp = qb.getParseInfo();
boolean skipRecursion = false;
if (ast.getToken() != null) {
skipRecursion = true;
switch (ast.getToken().getType()) {
case HiveParser.TOK_SELECTDI:
qb.countSelDi();
// fall through
case HiveParser.TOK_SELECT:
qb.countSel();
qbp.setSelExprForClause(ctx_1.dest, ast);
if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_HINTLIST) {
qbp.setHints((ASTNode) ast.getChild(0));
}
LinkedHashMap<String, ASTNode> aggregations = doPhase1GetAggregationsFromSelect(ast,
qb, ctx_1.dest);
doPhase1GetColumnAliasesFromSelect(ast, qbp);
qbp.setAggregationExprsForClause(ctx_1.dest, aggregations);
qbp.setDistinctFuncExprsForClause(ctx_1.dest,
doPhase1GetDistinctFuncExprs(aggregations));
break;
case HiveParser.TOK_WHERE:
qbp.setWhrExprForClause(ctx_1.dest, ast);
if (!SubQueryUtils.findSubQueries((ASTNode) ast.getChild(0)).isEmpty())
queryProperties.setFilterWithSubQuery(true);
break;
case HiveParser.TOK_INSERT_INTO:
String currentDatabase = SessionState.get().getCurrentDatabase();
String tab_name = getUnescapedName((ASTNode) ast.getChild(0).getChild(0), currentDatabase);
qbp.addInsertIntoTable(tab_name);
// TODO: is this supposed to fall thru?
case HiveParser.TOK_DESTINATION:
ctx_1.dest = "insclause-" + ctx_1.nextNum;
ctx_1.nextNum++;
// is there a insert in the subquery
if (qbp.getIsSubQ()) {
ASTNode ch = (ASTNode) ast.getChild(0);
if ((ch.getToken().getType() != HiveParser.TOK_DIR)
|| (((ASTNode) ch.getChild(0)).getToken().getType() != HiveParser.TOK_TMP_FILE)) {
throw new SemanticException(ErrorMsg.NO_INSERT_INSUBQUERY
.getMsg(ast));
}
}
qbp.setDestForClause(ctx_1.dest, (ASTNode) ast.getChild(0));
if (qbp.getClauseNamesForDest().size() > 1)
queryProperties.setMultiDestQuery(true);
break;
case HiveParser.TOK_FROM:
int child_count = ast.getChildCount();
if (child_count != 1) {
throw new SemanticException(generateErrorMessage(ast,
"Multiple Children " + child_count));
}
// Check if this is a subquery / lateral view
ASTNode frm = (ASTNode) ast.getChild(0);
if (frm.getToken().getType() == HiveParser.TOK_TABREF) {
processTable(qb, frm);
} else if (frm.getToken().getType() == HiveParser.TOK_SUBQUERY) {
processSubQuery(qb, frm);
} else if (frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW ||
frm.getToken().getType() == HiveParser.TOK_LATERAL_VIEW_OUTER) {
processLateralView(qb, frm);
} else if (isJoinToken(frm)) {
processJoin(qb, frm);
qbp.setJoinExpr(frm);
}else if(frm.getToken().getType() == HiveParser.TOK_PTBLFUNCTION){
queryProperties.setHasPTF(true);
processPTF(qb, frm);
}
break;
case HiveParser.TOK_CLUSTERBY:
// Get the clusterby aliases - these are aliased to the entries in the
// select list
queryProperties.setHasClusterBy(true);
qbp.setClusterByExprForClause(ctx_1.dest, ast);
break;
case HiveParser.TOK_DISTRIBUTEBY:
// Get the distribute by aliases - these are aliased to the entries in
// the
// select list
queryProperties.setHasDistributeBy(true);
qbp.setDistributeByExprForClause(ctx_1.dest, ast);
if (qbp.getClusterByForClause(ctx_1.dest) != null) {
throw new SemanticException(generateErrorMessage(ast,
ErrorMsg.CLUSTERBY_DISTRIBUTEBY_CONFLICT.getMsg()));
} else if (qbp.getOrderByForClause(ctx_1.dest) != null) {
throw new SemanticException(generateErrorMessage(ast,
ErrorMsg.ORDERBY_DISTRIBUTEBY_CONFLICT.getMsg()));
}
break;
case HiveParser.TOK_SORTBY:
// Get the sort by aliases - these are aliased to the entries in the
// select list
queryProperties.setHasSortBy(true);
qbp.setSortByExprForClause(ctx_1.dest, ast);
if (qbp.getClusterByForClause(ctx_1.dest) != null) {
throw new SemanticException(generateErrorMessage(ast,
ErrorMsg.CLUSTERBY_SORTBY_CONFLICT.getMsg()));
} else if (qbp.getOrderByForClause(ctx_1.dest) != null) {
throw new SemanticException(generateErrorMessage(ast,
ErrorMsg.ORDERBY_SORTBY_CONFLICT.getMsg()));
}
break;
case HiveParser.TOK_ORDERBY:
// Get the order by aliases - these are aliased to the entries in the
// select list
queryProperties.setHasOrderBy(true);
qbp.setOrderByExprForClause(ctx_1.dest, ast);
if (qbp.getClusterByForClause(ctx_1.dest) != null) {
throw new SemanticException(generateErrorMessage(ast,
ErrorMsg.CLUSTERBY_ORDERBY_CONFLICT.getMsg()));
}
break;
case HiveParser.TOK_GROUPBY:
case HiveParser.TOK_ROLLUP_GROUPBY:
case HiveParser.TOK_CUBE_GROUPBY:
case HiveParser.TOK_GROUPING_SETS:
// Get the groupby aliases - these are aliased to the entries in the
// select list
queryProperties.setHasGroupBy(true);
if (qbp.getJoinExpr() != null) {
queryProperties.setHasJoinFollowedByGroupBy(true);
}
if (qbp.getSelForClause(ctx_1.dest).getToken().getType() == HiveParser.TOK_SELECTDI) {
throw new SemanticException(generateErrorMessage(ast,
ErrorMsg.SELECT_DISTINCT_WITH_GROUPBY.getMsg()));
}
qbp.setGroupByExprForClause(ctx_1.dest, ast);
skipRecursion = true;
// Rollup and Cubes are syntactic sugar on top of grouping sets
if (ast.getToken().getType() == HiveParser.TOK_ROLLUP_GROUPBY) {
qbp.getDestRollups().add(ctx_1.dest);
} else if (ast.getToken().getType() == HiveParser.TOK_CUBE_GROUPBY) {
qbp.getDestCubes().add(ctx_1.dest);
} else if (ast.getToken().getType() == HiveParser.TOK_GROUPING_SETS) {
qbp.getDestGroupingSets().add(ctx_1.dest);
}
break;
case HiveParser.TOK_HAVING:
qbp.setHavingExprForClause(ctx_1.dest, ast);
qbp.addAggregationExprsForClause(ctx_1.dest,
doPhase1GetAggregationsFromSelect(ast, qb, ctx_1.dest));
break;
case HiveParser.KW_WINDOW:
if (!qb.hasWindowingSpec(ctx_1.dest) ) {
throw new SemanticException(generateErrorMessage(ast,
"Query has no Cluster/Distribute By; but has a Window definition"));
}
handleQueryWindowClauses(qb, ctx_1, ast);
break;
case HiveParser.TOK_LIMIT:
qbp.setDestLimit(ctx_1.dest, new Integer(ast.getChild(0).getText()));
break;
case HiveParser.TOK_ANALYZE:
// Case of analyze command
String table_name = getUnescapedName((ASTNode) ast.getChild(0).getChild(0));
qb.setTabAlias(table_name, table_name);
qb.addAlias(table_name);
qb.getParseInfo().setIsAnalyzeCommand(true);
qb.getParseInfo().setNoScanAnalyzeCommand(this.noscan);
qb.getParseInfo().setPartialScanAnalyzeCommand(this.partialscan);
// Allow analyze the whole table and dynamic partitions
HiveConf.setVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
HiveConf.setVar(conf, HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
break;
case HiveParser.TOK_UNION:
if (!qbp.getIsSubQ()) {
// this shouldn't happen. The parser should have converted the union to be
// contained in a subquery. Just in case, we keep the error as a fallback.
throw new SemanticException(generateErrorMessage(ast,
ErrorMsg.UNION_NOTIN_SUBQ.getMsg()));
}
skipRecursion = false;
break;
case HiveParser.TOK_INSERT:
ASTNode destination = (ASTNode) ast.getChild(0);
Tree tab = destination.getChild(0);
// Proceed if AST contains partition & If Not Exists
if (destination.getChildCount() == 2 &&
tab.getChildCount() == 2 &&
destination.getChild(1).getType() == HiveParser.TOK_IFNOTEXISTS) {
String tableName = tab.getChild(0).getChild(0).getText();
Tree partitions = tab.getChild(1);
int childCount = partitions.getChildCount();
HashMap<String, String> partition = new HashMap<String, String>();
for (int i = 0; i < childCount; i++) {
String partitionName = partitions.getChild(i).getChild(0).getText();
Tree pvalue = partitions.getChild(i).getChild(1);
if (pvalue == null) {
break;
}
String partitionVal = stripQuotes(pvalue.getText());
partition.put(partitionName, partitionVal);
}
// if it is a dynamic partition throw the exception
if (childCount != partition.size()) {
throw new SemanticException(ErrorMsg.INSERT_INTO_DYNAMICPARTITION_IFNOTEXISTS
.getMsg(partition.toString()));
}
Table table = null;
try {
table = db.getTable(tableName);
} catch (HiveException ex) {
throw new SemanticException(ex);
}
try {
Partition parMetaData = db.getPartition(table, partition, false);
// Check partition exists if it exists skip the overwrite
if (parMetaData != null) {
phase1Result = false;
skipRecursion = true;
LOG.info("Partition already exists so insert into overwrite " +
"skipped for partition : " + parMetaData.toString());
break;
}
} catch (HiveException e) {
LOG.info("Error while getting metadata : ", e);
}
validatePartSpec(table, partition, (ASTNode)tab, conf, false);
}
skipRecursion = false;
break;
case HiveParser.TOK_LATERAL_VIEW:
case HiveParser.TOK_LATERAL_VIEW_OUTER:
// todo: nested LV
assert ast.getChildCount() == 1;
qb.getParseInfo().getDestToLateralView().put(ctx_1.dest, ast);
break;
case HiveParser.TOK_CTE:
processCTE(qb, ast);
break;
default:
skipRecursion = false;
break;
}
}
if (!skipRecursion) {
// Iterate over the rest of the children
int child_count = ast.getChildCount();
for (int child_pos = 0; child_pos < child_count && phase1Result; ++child_pos) {
// Recurse
phase1Result = phase1Result && doPhase1((ASTNode) ast.getChild(child_pos), qb, ctx_1);
}
}
return phase1Result;
}
  /** Convenience overload: fetch metadata with no parent input entity. */
  private void getMetaData(QBExpr qbexpr) throws SemanticException {
    getMetaData(qbexpr, null);
  }
private void getMetaData(QBExpr qbexpr, ReadEntity parentInput)
throws SemanticException {
if (qbexpr.getOpcode() == QBExpr.Opcode.NULLOP) {
getMetaData(qbexpr.getQB(), parentInput);
} else {
getMetaData(qbexpr.getQBExpr1(), parentInput);
getMetaData(qbexpr.getQBExpr2(), parentInput);
}
}
  /** @return the table backing the given table scan, or null if unknown. */
  public Table getTable(TableScanOperator ts) {
    return topToTable.get(ts);
  }
  /** Convenience overload: fetch metadata for a QB with no parent input. */
  public void getMetaData(QB qb) throws SemanticException {
    getMetaData(qb, null);
  }
@SuppressWarnings("nls")
public void getMetaData(QB qb, ReadEntity parentInput) throws SemanticException {
try {
LOG.info("Get metadata for source tables");
// Go over the tables and populate the related structures.
// We have to materialize the table alias list since we might
// modify it in the middle for view rewrite.
List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
// Keep track of view alias to view name and read entity
// For eg: for a query like 'select * from V3', where V3 -> V2, V2 -> V1, V1 -> T
// keeps track of full view name and read entity corresponding to alias V3, V3:V2, V3:V2:V1.
// This is needed for tracking the dependencies for inputs, along with their parents.
Map<String, ObjectPair<String, ReadEntity>> aliasToViewInfo =
new HashMap<String, ObjectPair<String, ReadEntity>>();
/*
* used to capture view to SQ conversions. This is used to check for
* recursive CTE invocations.
*/
Map<String, String> sqAliasToCTEName = new HashMap<String, String>();
for (String alias : tabAliases) {
String tab_name = qb.getTabNameForAlias(alias);
Table tab = null;
try {
tab = db.getTable(tab_name);
} catch (InvalidTableException ite) {
/*
* if this s a CTE reference:
* Add its AST as a SubQuery to this QB.
*/
ASTNode cteNode = findCTEFromName(qb, tab_name.toLowerCase());
if ( cteNode != null ) {
String cte_name = tab_name.toLowerCase();
if (ctesExpanded.contains(cte_name)) {
throw new SemanticException("Recursive cte " + tab_name +
" detected (cycle: " + StringUtils.join(ctesExpanded, " -> ") +
" -> " + tab_name + ").");
}
addCTEAsSubQuery(qb, cte_name, alias);
sqAliasToCTEName.put(alias, cte_name);
continue;
}
ASTNode src = qb.getParseInfo().getSrcForAlias(alias);
if (null != src) {
throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(src));
} else {
throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(alias));
}
}
// Disallow INSERT INTO on bucketized tables
if (qb.getParseInfo().isInsertIntoTable(tab.getDbName(), tab.getTableName()) &&
tab.getNumBuckets() > 0) {
throw new SemanticException(ErrorMsg.INSERT_INTO_BUCKETIZED_TABLE.
getMsg("Table: " + tab_name));
}
// We check offline of the table, as if people only select from an
// non-existing partition of an offline table, the partition won't
// be added to inputs and validate() won't have the information to
// check the table's offline status.
// TODO: Modify the code to remove the checking here and consolidate
// it in validate()
//
if (tab.isOffline()) {
throw new SemanticException(ErrorMsg.OFFLINE_TABLE_OR_PARTITION.
getMsg("Table " + getUnescapedName(qb.getParseInfo().getSrcForAlias(alias))));
}
if (tab.isView()) {
if (qb.getParseInfo().isAnalyzeCommand()) {
throw new SemanticException(ErrorMsg.ANALYZE_VIEW.getMsg());
}
String fullViewName = tab.getDbName() + "." + tab.getTableName();
// Prevent view cycles
if (viewsExpanded.contains(fullViewName)) {
throw new SemanticException("Recursive view " + fullViewName +
" detected (cycle: " + StringUtils.join(viewsExpanded, " -> ") +
" -> " + fullViewName + ").");
}
replaceViewReferenceWithDefinition(qb, tab, tab_name, alias);
// This is the last time we'll see the Table objects for views, so add it to the inputs
// now
ReadEntity viewInput = new ReadEntity(tab, parentInput);
viewInput = PlanUtils.addInput(inputs, viewInput);
aliasToViewInfo.put(alias, new ObjectPair<String, ReadEntity>(fullViewName, viewInput));
viewAliasToInput.put(getAliasId(alias, qb), viewInput);
continue;
}
if (!InputFormat.class.isAssignableFrom(tab.getInputFormatClass())) {
throw new SemanticException(generateErrorMessage(
qb.getParseInfo().getSrcForAlias(alias),
ErrorMsg.INVALID_INPUT_FORMAT_TYPE.getMsg()));
}
qb.getMetaData().setSrcForAlias(alias, tab);
if (qb.getParseInfo().isAnalyzeCommand()) {
// allow partial partition specification for nonscan since noscan is fast.
tableSpec ts = new tableSpec(db, conf, (ASTNode) ast.getChild(0), true, this.noscan);
if (ts.specType == SpecType.DYNAMIC_PARTITION) { // dynamic partitions
try {
ts.partitions = db.getPartitionsByNames(ts.tableHandle, ts.partSpec);
} catch (HiveException e) {
throw new SemanticException(generateErrorMessage(
qb.getParseInfo().getSrcForAlias(alias),
"Cannot get partitions for " + ts.partSpec), e);
}
}
// validate partial scan command
QBParseInfo qbpi = qb.getParseInfo();
if (qbpi.isPartialScanAnalyzeCommand()) {
Class<? extends InputFormat> inputFormatClass = null;
switch (ts.specType) {
case TABLE_ONLY:
case DYNAMIC_PARTITION:
inputFormatClass = ts.tableHandle.getInputFormatClass();
break;
case STATIC_PARTITION:
inputFormatClass = ts.partHandle.getInputFormatClass();
break;
default:
assert false;
}
// throw a HiveException for formats other than rcfile or orcfile.
if (!(inputFormatClass.equals(RCFileInputFormat.class) || inputFormatClass
.equals(OrcInputFormat.class))) {
throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_NON_RCFILE.getMsg());
}
}
qb.getParseInfo().addTableSpec(alias, ts);
}
}
LOG.info("Get metadata for subqueries");
// Go over the subqueries and getMetaData for these
for (String alias : qb.getSubqAliases()) {
boolean wasView = aliasToViewInfo.containsKey(alias);
boolean wasCTE = sqAliasToCTEName.containsKey(alias);
ReadEntity newParentInput = null;
if (wasView) {
viewsExpanded.add(aliasToViewInfo.get(alias).getFirst());
newParentInput = aliasToViewInfo.get(alias).getSecond();
} else if (wasCTE) {
ctesExpanded.add(sqAliasToCTEName.get(alias));
}
QBExpr qbexpr = qb.getSubqForAlias(alias);
getMetaData(qbexpr, newParentInput);
if (wasView) {
viewsExpanded.remove(viewsExpanded.size() - 1);
} else if (wasCTE) {
ctesExpanded.remove(ctesExpanded.size() - 1);
}
}
RowFormatParams rowFormatParams = new RowFormatParams();
AnalyzeCreateCommonVars shared = new AnalyzeCreateCommonVars();
StorageFormat storageFormat = new StorageFormat();
LOG.info("Get metadata for destination tables");
// Go over all the destination structures and populate the related
// metadata
QBParseInfo qbp = qb.getParseInfo();
for (String name : qbp.getClauseNamesForDest()) {
ASTNode ast = qbp.getDestForClause(name);
switch (ast.getToken().getType()) {
case HiveParser.TOK_TAB: {
tableSpec ts = new tableSpec(db, conf, ast);
if (ts.tableHandle.isView()) {
throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
}
Class<?> outputFormatClass = ts.tableHandle.getOutputFormatClass();
if (!HiveOutputFormat.class.isAssignableFrom(outputFormatClass)) {
throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE
.getMsg(ast, "The class is " + outputFormatClass.toString()));
}
// tableSpec ts is got from the query (user specified),
// which means the user didn't specify partitions in their query,
// but whether the table itself is partitioned is not know.
if (ts.specType != SpecType.STATIC_PARTITION) {
// This is a table or dynamic partition
qb.getMetaData().setDestForAlias(name, ts.tableHandle);
// has dynamic as well as static partitions
if (ts.partSpec != null && ts.partSpec.size() > 0) {
qb.getMetaData().setPartSpecForAlias(name, ts.partSpec);
}
} else {
// This is a partition
qb.getMetaData().setDestForAlias(name, ts.partHandle);
}
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
// Set that variable to automatically collect stats during the MapReduce job
qb.getParseInfo().setIsInsertToTable(true);
// Add the table spec for the destination table.
qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts);
}
break;
}
case HiveParser.TOK_LOCAL_DIR:
case HiveParser.TOK_DIR: {
// This is a dfs file
String fname = stripQuotes(ast.getChild(0).getText());
if ((!qb.getParseInfo().getIsSubQ())
&& (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.TOK_TMP_FILE)) {
if (qb.isCTAS()) {
qb.setIsQuery(false);
ctx.setResDir(null);
ctx.setResFile(null);
// allocate a temporary output dir on the location of the table
String tableName = getUnescapedName((ASTNode) ast.getChild(0));
Table newTable = db.newTable(tableName);
Path location;
try {
Warehouse wh = new Warehouse(conf);
location = wh.getDatabasePath(db.getDatabase(newTable.getDbName()));
} catch (MetaException e) {
throw new SemanticException(e);
}
try {
fname = ctx.getExternalTmpPath(
FileUtils.makeQualified(location, conf).toUri()).toString();
} catch (Exception e) {
throw new SemanticException(generateErrorMessage(ast,
"Error creating temporary folder on: " + location.toString()), e);
}
if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
tableSpec ts = new tableSpec(db, conf, this.ast);
// Set that variable to automatically collect stats during the MapReduce job
qb.getParseInfo().setIsInsertToTable(true);
// Add the table spec for the destination table.
qb.getParseInfo().addTableSpec(ts.tableName.toLowerCase(), ts);
}
} else {
qb.setIsQuery(true);
fname = ctx.getMRTmpPath().toString();
ctx.setResDir(new Path(fname));
}
}
qb.getMetaData().setDestForAlias(name, fname,
(ast.getToken().getType() == HiveParser.TOK_DIR));
CreateTableDesc localDirectoryDesc = new CreateTableDesc();
boolean localDirectoryDescIsSet = false;
int numCh = ast.getChildCount();
for (int num = 1; num < numCh ; num++){
ASTNode child = (ASTNode) ast.getChild(num);
if (ast.getChild(num) != null){
switch (child.getToken().getType()) {
case HiveParser.TOK_TABLEROWFORMAT:
rowFormatParams.analyzeRowFormat(shared, child);
localDirectoryDesc.setFieldDelim(rowFormatParams.fieldDelim);
localDirectoryDesc.setLineDelim(rowFormatParams.lineDelim);
localDirectoryDesc.setCollItemDelim(rowFormatParams.collItemDelim);
localDirectoryDesc.setMapKeyDelim(rowFormatParams.mapKeyDelim);
localDirectoryDesc.setFieldEscape(rowFormatParams.fieldEscape);
localDirectoryDesc.setNullFormat(rowFormatParams.nullFormat);
localDirectoryDescIsSet=true;
break;
case HiveParser.TOK_TABLESERIALIZER:
ASTNode serdeChild = (ASTNode) child.getChild(0);
shared.serde = unescapeSQLString(serdeChild.getChild(0).getText());
localDirectoryDesc.setSerName(shared.serde);
localDirectoryDescIsSet=true;
break;
case HiveParser.TOK_TBLSEQUENCEFILE:
case HiveParser.TOK_TBLTEXTFILE:
case HiveParser.TOK_TBLRCFILE:
case HiveParser.TOK_TBLORCFILE:
case HiveParser.TOK_TABLEFILEFORMAT:
storageFormat.fillStorageFormat(child, shared);
localDirectoryDesc.setOutputFormat(storageFormat.outputFormat);
localDirectoryDesc.setSerName(shared.serde);
localDirectoryDescIsSet=true;
break;
}
}
}
if (localDirectoryDescIsSet){
qb.setLocalDirectoryDesc(localDirectoryDesc);
}
break;
}
default:
throw new SemanticException(generateErrorMessage(ast,
"Unknown Token Type " + ast.getToken().getType()));
}
}
} catch (HiveException e) {
// Has to use full name to make sure it does not conflict with
// org.apache.commons.lang.StringUtils
LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
throw new SemanticException(e.getMessage(), e);
}
}
/**
 * Replaces a view reference in the FROM clause with the view's expanded
 * SQL definition: the stored text is re-parsed, every node of the resulting
 * AST is tagged with an origin record (for error reporting), and the tree
 * is registered on {@code qb} as a sub-query under {@code alias}.
 *
 * @param qb       query block containing the view reference
 * @param tab      metastore Table object for the view
 * @param tab_name name the view was referenced by in the query
 * @param alias    alias the view is known by inside {@code qb}
 * @throws SemanticException if the stored view definition no longer parses
 */
private void replaceViewReferenceWithDefinition(QB qb, Table tab,
    String tab_name, String alias) throws SemanticException {

  ParseDriver pd = new ParseDriver();
  ASTNode viewTree;
  // Origin record stamped onto every node of the re-parsed view AST so that
  // later semantic errors can be traced back to the view definition.
  final ASTNodeOrigin viewOrigin = new ASTNodeOrigin("VIEW", tab.getTableName(),
      tab.getViewExpandedText(), alias, qb.getParseInfo().getSrcForAlias(
          alias));
  try {
    String viewText = tab.getViewExpandedText();
    // Reparse text, to avoid clobbering the top-level token stream.
    // NOTE(review): an earlier comment said "passing null for context",
    // but ctx is passed here — confirm which is intended.
    ASTNode tree = pd.parse(viewText, ctx, false);
    tree = ParseUtils.findRootNonNullToken(tree);
    viewTree = tree;
    // Walk the whole view AST and set viewOrigin on each node.
    Dispatcher nodeOriginDispatcher = new Dispatcher() {
      @Override
      public Object dispatch(Node nd, java.util.Stack<Node> stack,
          Object... nodeOutputs) {
        ((ASTNode) nd).setOrigin(viewOrigin);
        return null;
      }
    };
    GraphWalker nodeOriginTagger = new DefaultGraphWalker(
        nodeOriginDispatcher);
    nodeOriginTagger.startWalking(java.util.Collections
        .<Node> singleton(viewTree), null);
  } catch (ParseException e) {
    // A user could encounter this if a stored view definition contains
    // an old SQL construct which has been eliminated in a later Hive
    // version, so we need to provide full debugging info to help
    // with fixing the view definition.
    LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
    StringBuilder sb = new StringBuilder();
    sb.append(e.getMessage());
    ErrorMsg.renderOrigin(sb, viewOrigin);
    throw new SemanticException(sb.toString(), e);
  }
  // Run phase-1 analysis on the view AST and splice it into qb as a
  // sub-query under the original alias.
  QBExpr qbexpr = new QBExpr(alias);
  doPhase1QBExpr(viewTree, qbexpr, qb.getId(), alias);
  // Recurse over the view
  qb.rewriteViewToSubq(alias, tab_name, qbexpr);
}
/**
 * Returns true if {@code elem} matches any entry of {@code list}, where
 * each entry is lower-cased before comparison ({@code elem} is compared
 * as given).
 */
private boolean isPresent(String[] list, String elem) {
  for (int idx = 0; idx < list.length; idx++) {
    if (list[idx].toLowerCase().equals(elem)) {
      return true;
    }
  }
  return false;
}
/*
* This method is invoked for unqualified column references in join conditions.
* This is passed in the Alias to Operator mapping in the QueryBlock so far.
* We try to resolve the unqualified column against each of the Operator Row Resolvers.
* - if the column is present in only one RowResolver, we treat this as a reference to
* that Operator.
* - if the column resolves with more than one RowResolver, we treat it as an Ambiguous
* reference.
* - if the column doesn't resolve with any RowResolver, we treat this as an Invalid
* reference.
*/
/**
 * Resolves an unqualified column reference against the row resolvers of all
 * operators seen so far and returns the unique table alias it belongs to.
 * Throws AMBIGUOUS_TABLE_ALIAS when more than one resolver knows the
 * column, and INVALID_TABLE_ALIAS when none does.
 */
@SuppressWarnings("rawtypes")
private String findAlias(ASTNode columnRef,
    Map<String, Operator> aliasToOpInfo) throws SemanticException {
  String colName = unescapeIdentifier(columnRef.getChild(0).getText()
      .toLowerCase());
  String resolvedAlias = null;
  if (aliasToOpInfo != null) {
    for (Map.Entry<String, Operator> entry : aliasToOpInfo.entrySet()) {
      RowResolver resolver = opParseCtx.get(entry.getValue()).getRowResolver();
      if (resolver.get(null, colName) == null) {
        continue;
      }
      if (resolvedAlias != null) {
        // Column resolves against two different sources: ambiguous.
        throw new SemanticException(
            ErrorMsg.AMBIGUOUS_TABLE_ALIAS.getMsg(columnRef.getChild(0)));
      }
      resolvedAlias = entry.getKey();
    }
  }
  if (resolvedAlias == null) {
    throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(columnRef
        .getChild(0)));
  }
  return resolvedAlias;
}
/**
 * Recursively walks a join-condition AST node and classifies every table
 * alias it references as belonging to the left or right side of the join,
 * appending to {@code leftAliases} / {@code rightAliases} (no duplicates).
 *
 * @param joinTree     join tree whose left/right alias lists drive the classification
 * @param condn        condition (sub)tree being examined
 * @param leftAliases  output: aliases resolved against the join's left side
 * @param rightAliases output: aliases resolved against the join's right side
 * @param fields       output: bare identifiers that may be field names; may be
 *                     null when the caller does not collect them
 * @param aliasToOpInfo alias-to-operator map used to resolve unqualified columns
 * @throws SemanticException on unresolvable/ambiguous references or malformed operators
 */
@SuppressWarnings("nls")
void parseJoinCondPopulateAlias(QBJoinTree joinTree, ASTNode condn,
    ArrayList<String> leftAliases, ArrayList<String> rightAliases,
    ArrayList<String> fields,
    Map<String, Operator> aliasToOpInfo) throws SemanticException {
  // String[] allAliases = joinTree.getAllAliases();
  switch (condn.getToken().getType()) {
  case HiveParser.TOK_TABLE_OR_COL:
    String tableOrCol = unescapeIdentifier(condn.getChild(0).getText()
        .toLowerCase());
    unparseTranslator.addIdentifierTranslation((ASTNode) condn.getChild(0));
    if (isPresent(joinTree.getLeftAliases(), tableOrCol)) {
      if (!leftAliases.contains(tableOrCol)) {
        leftAliases.add(tableOrCol);
      }
    } else if (isPresent(joinTree.getRightAliases(), tableOrCol)) {
      if (!rightAliases.contains(tableOrCol)) {
        rightAliases.add(tableOrCol);
      }
    } else {
      // Not a known alias of this join: treat the identifier as an
      // unqualified column and resolve it to an alias via the row
      // resolvers of the operators seen so far.
      tableOrCol = findAlias(condn, aliasToOpInfo);
      if (isPresent(joinTree.getLeftAliases(), tableOrCol)) {
        if (!leftAliases.contains(tableOrCol)) {
          leftAliases.add(tableOrCol);
        }
      } else {
        if (!rightAliases.contains(tableOrCol)) {
          rightAliases.add(tableOrCol);
        }
      }
    }
    break;

  case HiveParser.Identifier:
    // it may be a field name, return the identifier and let the caller decide
    // whether it is or not
    if (fields != null) {
      fields
          .add(unescapeIdentifier(condn.getToken().getText().toLowerCase()));
    }
    unparseTranslator.addIdentifierTranslation(condn);
    break;

  case HiveParser.Number:
  case HiveParser.StringLiteral:
  case HiveParser.BigintLiteral:
  case HiveParser.SmallintLiteral:
  case HiveParser.TinyintLiteral:
  case HiveParser.DecimalLiteral:
  case HiveParser.TOK_STRINGLITERALSEQUENCE:
  case HiveParser.TOK_CHARSETLITERAL:
  case HiveParser.KW_TRUE:
  case HiveParser.KW_FALSE:
    // Literals reference no table alias; nothing to record.
    break;

  case HiveParser.TOK_FUNCTION:
    // check all the arguments
    // (child 0 is skipped — presumably the function name; args start at 1)
    for (int i = 1; i < condn.getChildCount(); i++) {
      parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(i),
          leftAliases, rightAliases, null, aliasToOpInfo);
    }
    break;

  default:
    // This is an operator - so check whether it is unary or binary operator
    if (condn.getChildCount() == 1) {
      parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
          leftAliases, rightAliases, null, aliasToOpInfo);
    } else if (condn.getChildCount() == 2) {

      ArrayList<String> fields1 = null;
      // if it is a dot operator, remember the field name of the rhs of the
      // left semijoin
      if (joinTree.getNoSemiJoin() == false
          && condn.getToken().getType() == HiveParser.DOT) {
        // get the semijoin rhs table name and field name
        fields1 = new ArrayList<String>();
        int rhssize = rightAliases.size();
        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
            leftAliases, rightAliases, null, aliasToOpInfo);
        String rhsAlias = null;

        // If child 0 added a right alias, the last one added is the
        // semijoin's rhs table.
        if (rightAliases.size() > rhssize) { // the new table is rhs table
          rhsAlias = rightAliases.get(rightAliases.size() - 1);
        }

        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
            leftAliases, rightAliases, fields1, aliasToOpInfo);
        if (rhsAlias != null && fields1.size() > 0) {
          joinTree.addRHSSemijoinColumns(rhsAlias, condn);
        }
      } else {
        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(0),
            leftAliases, rightAliases, null, aliasToOpInfo);
        parseJoinCondPopulateAlias(joinTree, (ASTNode) condn.getChild(1),
            leftAliases, rightAliases, fields1, aliasToOpInfo);
      }
    } else {
      throw new SemanticException(condn.toStringTree() + " encountered with "
          + condn.getChildCount() + " children");
    }
    break;
  }
}
/**
 * Records an equality operand on the join tree: an operand referencing
 * only right-side aliases becomes a right join-key expression; one
 * referencing only left-side aliases becomes a left join-key expression
 * (its aliases are also accumulated into {@code leftSrc}). An operand
 * referencing both sides, or neither, is an invalid join condition.
 */
private void populateAliases(List<String> leftAliases,
    List<String> rightAliases, ASTNode condn, QBJoinTree joinTree,
    List<String> leftSrc) throws SemanticException {
  boolean hasLeft = !leftAliases.isEmpty();
  boolean hasRight = !rightAliases.isEmpty();

  if (hasLeft && hasRight) {
    throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
        .getMsg(condn));
  }

  if (hasRight) {
    assert rightAliases.size() == 1;
    joinTree.getExpressions().get(1).add(condn);
  } else if (hasLeft) {
    joinTree.getExpressions().get(0).add(condn);
    for (String leftAlias : leftAliases) {
      if (!leftSrc.contains(leftAlias)) {
        leftSrc.add(leftAlias);
      }
    }
  } else {
    throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_2
        .getMsg(condn));
  }
}
/*
* refactored out of the Equality case of parseJoinCondition
* so that this can be recursively called on its left tree in the case when
* only left sources are referenced in a Predicate
*/
/**
 * Routes an equality predicate ({@code leftCondn} = {@code rightCondn})
 * to the proper slot of the join tree: a join-key expression pair, a
 * post-join filter, or a filter pushed above the join — based on which
 * join side(s) each operand references.
 *
 * The four alias lists were filled by parseJoinCondPopulateAlias: for each
 * operand, Al1 holds the join's left-side aliases it references and Al2
 * the right-side aliases. The caller guarantees that no single operand
 * references both sides.
 *
 * @param leftSrc output: left-side source aliases contributing join keys
 */
void applyEqualityPredicateToQBJoinTree(QBJoinTree joinTree,
    JoinType type,
    List<String> leftSrc,
    ASTNode joinCond,
    ASTNode leftCondn,
    ASTNode rightCondn,
    List<String> leftCondAl1,
    List<String> leftCondAl2,
    List<String> rightCondAl1,
    List<String> rightCondAl2) throws SemanticException {
  // Case A: the left operand references the join's left side.
  if (leftCondAl1.size() != 0) {
    // Right operand also on the left side, or references nothing:
    // this is a filter on the left side, not a join key.
    if ((rightCondAl1.size() != 0)
        || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
      if (type.equals(JoinType.LEFTOUTER) ||
          type.equals(JoinType.FULLOUTER)) {
        if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
          joinTree.getFilters().get(0).add(joinCond);
        } else {
          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
          joinTree.getFiltersForPushing().get(0).add(joinCond);
        }
      } else {
        /*
         * If the rhs references table sources and this QBJoinTree has a leftTree;
         * hand it to the leftTree and let it recursively handle it.
         * There are 3 cases of passing a condition down:
         * 1. The leftSide && rightSide don't contains references to the leftTree's rightAlias
         *    => pass the lists down as is.
         * 2. The leftSide contains refs to the leftTree's rightAlias, the rightSide doesn't
         *    => switch the leftCondAl1 and leftConAl2 lists and pass down.
         * 3. The rightSide contains refs to the leftTree's rightAlias, the leftSide doesn't
         *    => switch the rightCondAl1 and rightConAl2 lists and pass down.
         * 4. In case both contain references to the leftTree's rightAlias
         *   => we cannot push the condition down.
         * 5. If either contain references to both left & right
         *    => we cannot push forward.
         */
        if (rightCondAl1.size() != 0) {
          QBJoinTree leftTree = joinTree.getJoinSrc();
          List<String> leftTreeLeftSrc = new ArrayList<String>();
          if (leftTree != null) {
            // Alias produced by the left subtree's right input (if any);
            // references to it cannot be pushed below that subtree.
            String leftTreeRightSource = leftTree.getRightAliases() != null &&
                leftTree.getRightAliases().length > 0 ?
                leftTree.getRightAliases()[0] : null;

            boolean leftHasRightReference = false;
            for (String r : leftCondAl1) {
              if (r.equals(leftTreeRightSource)) {
                leftHasRightReference = true;
                break;
              }
            }
            boolean rightHasRightReference = false;
            for (String r : rightCondAl1) {
              if (r.equals(leftTreeRightSource)) {
                rightHasRightReference = true;
                break;
              }
            }

            boolean pushedDown = false;
            if ( !leftHasRightReference && !rightHasRightReference ) {
              // Case 1 above: pass the lists down unchanged.
              applyEqualityPredicateToQBJoinTree(leftTree, type, leftTreeLeftSrc,
                  joinCond, leftCondn, rightCondn,
                  leftCondAl1, leftCondAl2,
                  rightCondAl1, rightCondAl2);
              pushedDown = true;
            } else if ( !leftHasRightReference && rightHasRightReference && rightCondAl1.size() == 1 ) {
              // Case 3 above: swap the right operand's alias lists.
              applyEqualityPredicateToQBJoinTree(leftTree, type, leftTreeLeftSrc,
                  joinCond, leftCondn, rightCondn,
                  leftCondAl1, leftCondAl2,
                  rightCondAl2, rightCondAl1);
              pushedDown = true;
            } else if (leftHasRightReference && !rightHasRightReference && leftCondAl1.size() == 1 ) {
              // Case 2 above: swap the left operand's alias lists.
              applyEqualityPredicateToQBJoinTree(leftTree, type, leftTreeLeftSrc,
                  joinCond, leftCondn, rightCondn,
                  leftCondAl2, leftCondAl1,
                  rightCondAl1, rightCondAl2);
              pushedDown = true;
            }

            if (leftTreeLeftSrc.size() == 1) {
              leftTree.setLeftAlias(leftTreeLeftSrc.get(0));
            }
            if ( pushedDown) {
              return;
            }
          } // leftTree != null
        }
        // Could not push down: keep as a pushable filter on the left side.
        joinTree.getFiltersForPushing().get(0).add(joinCond);
      }
    } else if (rightCondAl2.size() != 0) {
      // left operand -> left side, right operand -> right side:
      // a genuine equi-join key pair.
      populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
          leftSrc);
      populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
          leftSrc);
      boolean nullsafe = joinCond.getToken().getType() == HiveParser.EQUAL_NS;
      joinTree.getNullSafes().add(nullsafe);
    }
  } else if (leftCondAl2.size() != 0) {
    // Case B: the left operand references the join's right side.
    if ((rightCondAl2.size() != 0)
        || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
      // Both operands on the right side (or right operand references
      // nothing): filter on the right side.
      if (type.equals(JoinType.RIGHTOUTER)
          || type.equals(JoinType.FULLOUTER)) {
        if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
          joinTree.getFilters().get(1).add(joinCond);
        } else {
          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
          joinTree.getFiltersForPushing().get(1).add(joinCond);
        }
      } else {
        joinTree.getFiltersForPushing().get(1).add(joinCond);
      }
    } else if (rightCondAl1.size() != 0) {
      // left operand -> right side, right operand -> left side: join keys.
      populateAliases(leftCondAl1, leftCondAl2, leftCondn, joinTree,
          leftSrc);
      populateAliases(rightCondAl1, rightCondAl2, rightCondn, joinTree,
          leftSrc);
      boolean nullsafe = joinCond.getToken().getType() == HiveParser.EQUAL_NS;
      joinTree.getNullSafes().add(nullsafe);
    }
  } else if (rightCondAl1.size() != 0) {
    // Case C: left operand references nothing; right operand references
    // the left side -> filter on the left side.
    if (type.equals(JoinType.LEFTOUTER)
        || type.equals(JoinType.FULLOUTER)) {
      if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
        joinTree.getFilters().get(0).add(joinCond);
      } else {
        LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
        joinTree.getFiltersForPushing().get(0).add(joinCond);
      }
    } else {
      joinTree.getFiltersForPushing().get(0).add(joinCond);
    }
  } else {
    // Case D: no left-side references at all -> filter on the right side.
    if (type.equals(JoinType.RIGHTOUTER)
        || type.equals(JoinType.FULLOUTER)) {
      if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
        joinTree.getFilters().get(1).add(joinCond);
      } else {
        LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
        joinTree.getFiltersForPushing().get(1).add(joinCond);
      }
    } else {
      joinTree.getFiltersForPushing().get(1).add(joinCond);
    }
  }
}
/**
 * Entry point for parsing the join condition of the top-most join in
 * {@code joinTree}: delegates to the typed overload, then records, for
 * outer joins, a mapping from each join input to the number of filters
 * collected on its side.
 */
@SuppressWarnings("rawtypes")
private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond, List<String> leftSrc,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  if (joinCond == null) {
    return;
  }
  JoinCond joinCondDesc = joinTree.getJoinCond()[0];
  JoinType joinType = joinCondDesc.getJoinType();
  parseJoinCondition(joinTree, joinCond, leftSrc, joinType, aliasToOpInfo);

  List<ArrayList<ASTNode>> filterLists = joinTree.getFilters();
  boolean fullOuter = joinType == JoinType.FULLOUTER;
  if (fullOuter || joinType == JoinType.LEFTOUTER) {
    joinTree.addFilterMapping(joinCondDesc.getLeft(), joinCondDesc.getRight(),
        filterLists.get(0).size());
  }
  if (fullOuter || joinType == JoinType.RIGHTOUTER) {
    joinTree.addFilterMapping(joinCondDesc.getRight(), joinCondDesc.getLeft(),
        filterLists.get(1).size());
  }
}
/**
* Parse the join condition. If the condition is a join condition, throw an
* error if it is not an equality. Otherwise, break it into left and right
* expressions and store in the join tree. If the condition is a join filter,
* add it to the filter list of join tree. The join condition can contains
* conditions on both the left and tree trees and filters on either.
* Currently, we only support equi-joins, so we throw an error if the
* condition involves both subtrees and is not a equality. Also, we only
* support AND i.e ORs are not supported currently as their semantics are not
* very clear, may lead to data explosion and there is no usecase.
*
* @param joinTree
* jointree to be populated
* @param joinCond
* join condition
* @param leftSrc
* left sources
* @throws SemanticException
*/
@SuppressWarnings("rawtypes")
private void parseJoinCondition(QBJoinTree joinTree, ASTNode joinCond,
    List<String> leftSrc, JoinType type,
    Map<String, Operator> aliasToOpInfo) throws SemanticException {
  if (joinCond == null) {
    return;
  }

  switch (joinCond.getToken().getType()) {
  case HiveParser.KW_OR:
    // ORs in join conditions are unsupported (see the method javadoc).
    throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_3
        .getMsg(joinCond));

  case HiveParser.KW_AND:
    // AND: each conjunct is processed independently.
    parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(0), leftSrc, type, aliasToOpInfo);
    parseJoinCondition(joinTree, (ASTNode) joinCond.getChild(1), leftSrc, type, aliasToOpInfo);
    break;

  case HiveParser.EQUAL_NS:
  case HiveParser.EQUAL:
    // Equality: classify the aliases referenced on each side of the '='.
    // Al1/Al2 collect the left-/right-side join aliases per operand.
    ASTNode leftCondn = (ASTNode) joinCond.getChild(0);
    ArrayList<String> leftCondAl1 = new ArrayList<String>();
    ArrayList<String> leftCondAl2 = new ArrayList<String>();
    parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2,
        null, aliasToOpInfo);

    ASTNode rightCondn = (ASTNode) joinCond.getChild(1);
    ArrayList<String> rightCondAl1 = new ArrayList<String>();
    ArrayList<String> rightCondAl2 = new ArrayList<String>();
    parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1,
        rightCondAl2, null, aliasToOpInfo);

    // is it a filter or a join condition
    // if it is filter see if it can be pushed above the join
    // filter cannot be pushed if
    // * join is full outer or
    // * join is left outer and filter is on left alias or
    // * join is right outer and filter is on right alias
    if (((leftCondAl1.size() != 0) && (leftCondAl2.size() != 0))
        || ((rightCondAl1.size() != 0) && (rightCondAl2.size() != 0))) {
      // A single operand references both join sides: not an equi-join.
      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
          .getMsg(joinCond));
    }

    applyEqualityPredicateToQBJoinTree(joinTree, type, leftSrc,
        joinCond, leftCondn, rightCondn,
        leftCondAl1, leftCondAl2,
        rightCondAl1, rightCondAl2);

    break;

  default:
    // Non-equality predicate: treated as a filter; determine which single
    // side it references and attach it to that side's filter list.
    boolean isFunction = (joinCond.getType() == HiveParser.TOK_FUNCTION);

    // Create all children
    // (for functions, child 0 is skipped and arguments start at index 1)
    int childrenBegin = (isFunction ? 1 : 0);
    ArrayList<ArrayList<String>> leftAlias = new ArrayList<ArrayList<String>>(
        joinCond.getChildCount() - childrenBegin);
    ArrayList<ArrayList<String>> rightAlias = new ArrayList<ArrayList<String>>(
        joinCond.getChildCount() - childrenBegin);
    for (int ci = 0; ci < joinCond.getChildCount() - childrenBegin; ci++) {
      ArrayList<String> left = new ArrayList<String>();
      ArrayList<String> right = new ArrayList<String>();
      leftAlias.add(left);
      rightAlias.add(right);
    }

    for (int ci = childrenBegin; ci < joinCond.getChildCount(); ci++) {
      parseJoinCondPopulateAlias(joinTree, (ASTNode) joinCond.getChild(ci),
          leftAlias.get(ci - childrenBegin), rightAlias.get(ci
              - childrenBegin), null, aliasToOpInfo);
    }

    boolean leftAliasNull = true;
    for (ArrayList<String> left : leftAlias) {
      if (left.size() != 0) {
        leftAliasNull = false;
        break;
      }
    }

    boolean rightAliasNull = true;
    for (ArrayList<String> right : rightAlias) {
      if (right.size() != 0) {
        rightAliasNull = false;
        break;
      }
    }

    if (!leftAliasNull && !rightAliasNull) {
      // The filter touches both sides; it cannot be attributed to one.
      throw new SemanticException(ErrorMsg.INVALID_JOIN_CONDITION_1
          .getMsg(joinCond));
    }

    if (!leftAliasNull) {
      // Filter on the left side.
      if (type.equals(JoinType.LEFTOUTER)
          || type.equals(JoinType.FULLOUTER)) {
        if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
          joinTree.getFilters().get(0).add(joinCond);
        } else {
          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
          joinTree.getFiltersForPushing().get(0).add(joinCond);
        }
      } else {
        joinTree.getFiltersForPushing().get(0).add(joinCond);
      }
    } else {
      // Filter on the right side (or referencing nothing).
      if (type.equals(JoinType.RIGHTOUTER)
          || type.equals(JoinType.FULLOUTER)) {
        if (conf.getBoolVar(HiveConf.ConfVars.HIVEOUTERJOINSUPPORTSFILTERS)) {
          joinTree.getFilters().get(1).add(joinCond);
        } else {
          LOG.warn(ErrorMsg.OUTERJOIN_USES_FILTERS);
          joinTree.getFiltersForPushing().get(1).add(joinCond);
        }
      } else {
        joinTree.getFiltersForPushing().get(1).add(joinCond);
      }
    }

    break;
  }
}
/**
 * Scans a WHERE-clause predicate for equality conjuncts that are actually
 * join conditions (each operand referencing exactly one join side) and
 * attaches them to the join tree via applyEqualityPredicateToQBJoinTree.
 * Predicates that don't qualify are left untouched for regular
 * where-clause / predicate-pushdown handling.
 */
@SuppressWarnings("rawtypes")
private void extractJoinCondsFromWhereClause(QBJoinTree joinTree, QB qb, String dest, ASTNode predicate,
    Map<String, Operator> aliasToOpInfo) throws SemanticException {

  switch (predicate.getType()) {
  case HiveParser.KW_AND:
    // Recurse into both conjuncts; each may independently be a join cond.
    extractJoinCondsFromWhereClause(joinTree, qb, dest,
        (ASTNode) predicate.getChild(0), aliasToOpInfo);
    extractJoinCondsFromWhereClause(joinTree, qb, dest,
        (ASTNode) predicate.getChild(1), aliasToOpInfo);
    break;

  case HiveParser.EQUAL_NS:
  case HiveParser.EQUAL:

    // Classify the aliases referenced by each operand of the equality.
    ASTNode leftCondn = (ASTNode) predicate.getChild(0);
    ArrayList<String> leftCondAl1 = new ArrayList<String>();
    ArrayList<String> leftCondAl2 = new ArrayList<String>();
    try {
      parseJoinCondPopulateAlias(joinTree, leftCondn, leftCondAl1, leftCondAl2,
          null, aliasToOpInfo);
    } catch(SemanticException se) {
      // suppress here; if it is a real issue will get caught in where clause handling.
      return;
    }

    ASTNode rightCondn = (ASTNode) predicate.getChild(1);
    ArrayList<String> rightCondAl1 = new ArrayList<String>();
    ArrayList<String> rightCondAl2 = new ArrayList<String>();
    try {
      parseJoinCondPopulateAlias(joinTree, rightCondn, rightCondAl1,
          rightCondAl2, null, aliasToOpInfo);
    } catch(SemanticException se) {
      // suppress here; if it is a real issue will get caught in where clause handling.
      return;
    }

    if (((leftCondAl1.size() != 0) && (leftCondAl2.size() != 0))
        || ((rightCondAl1.size() != 0) && (rightCondAl2.size() != 0))) {
      // this is not a join condition.
      // (a single operand references both join sides)
      return;
    }

    if (((leftCondAl1.size() == 0) && (leftCondAl2.size() == 0))
        || ((rightCondAl1.size() == 0) && (rightCondAl2.size() == 0))) {
      // this is not a join condition. Will get handled by predicate pushdown.
      return;
    }

    List<String> leftSrc = new ArrayList<String>();
    JoinCond cond = joinTree.getJoinCond()[0];
    JoinType type = cond.getJoinType();
    applyEqualityPredicateToQBJoinTree(joinTree, type, leftSrc,
        predicate, leftCondn, rightCondn,
        leftCondAl1, leftCondAl2,
        rightCondAl1, rightCondAl2);
    if (leftSrc.size() == 1) {
      joinTree.setLeftAlias(leftSrc.get(0));
    }

    // todo: hold onto this predicate, so that we don't add it to the Filter Operator.

    break;

  default:
    // Anything else (OR, non-equality operators, literals) is not
    // extracted here.
    return;
  }
}
/**
 * Registers {@code op} in the operator-to-parse-context map with a context
 * built around {@code rr}, lets the operator augment the plan, and returns
 * the same operator for call chaining.
 */
@SuppressWarnings("nls")
public <T extends OperatorDesc> Operator<T> putOpInsertMap(Operator<T> op,
    RowResolver rr) {
  opParseCtx.put(op, new OpParseContext(rr));
  op.augmentPlan();
  return op;
}
/**
 * Generates the filter plan for a HAVING clause.
 *
 * Before building the filter, every select-list expression that was given
 * a column alias is registered in the input row resolver under that alias,
 * so the having clause can refer to it by alias.
 *
 * @param dest           destination clause name the having clause belongs to
 * @param qb             the query block
 * @param input          operator the having filter is applied on top of
 * @param aliasToOpInfo  alias-to-operator map, forwarded for SubQuery rewriting
 * @return the filter operator implementing the having clause
 * @throws SemanticException on semantic errors in the having expression
 */
@SuppressWarnings("nls")
private Operator genHavingPlan(String dest, QB qb, Operator input,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {

  ASTNode havingExpr = qb.getParseInfo().getHavingForClause(dest);

  OpParseContext inputCtx = opParseCtx.get(input);
  RowResolver inputRR = inputCtx.getRowResolver();
  // Expose aliased select expressions under their column alias.
  // Iterate over entries rather than keySet() + get(key) to avoid a
  // redundant map lookup per expression.
  Map<ASTNode, String> exprToColumnAlias = qb.getParseInfo().getAllExprToColumnAlias();
  for (Map.Entry<ASTNode, String> entry : exprToColumnAlias.entrySet()) {
    if (inputRR.getExpression(entry.getKey()) != null) {
      inputRR.put("", entry.getValue(), inputRR.getExpression(entry.getKey()));
    }
  }
  ASTNode condn = (ASTNode) havingExpr.getChild(0);

  /*
   * Now a having clause can contain a SubQuery predicate;
   * so we invoke genFilterPlan to handle SubQuery algebraic transformation,
   * just as is done for SubQuery predicates appearing in the Where Clause.
   */
  Operator output = genFilterPlan(condn, qb, input, aliasToOpInfo, true);
  output = putOpInsertMap(output, inputRR);
  return output;
}
/**
 * Builds the operator plan for a SubQuery predicate: attaches the SubQuery
 * definition to {@code qbSQ}, runs phase-1 analysis over the SubQuery AST,
 * resolves its metadata, and generates the plan.
 */
private Operator genPlanForSubQueryPredicate(
    QB qbSQ,
    ISubQueryJoinInfo subQueryPredicate) throws SemanticException {
  qbSQ.setSubQueryDef(subQueryPredicate.getSubQuery());
  Phase1Ctx phase1Ctx = initPhase1Ctx();
  doPhase1(subQueryPredicate.getSubQueryAST(), qbSQ, phase1Ctx);
  getMetaData(qbSQ);
  return genPlan(qbSQ);
}
@SuppressWarnings("nls")
private Operator genFilterPlan(ASTNode searchCond, QB qb, Operator input,
Map<String, Operator> aliasToOpInfo,
boolean forHavingClause)
throws SemanticException {
OpParseContext inputCtx = opParseCtx.get(input);
RowResolver inputRR = inputCtx.getRowResolver();
/*
* Handling of SubQuery Expressions:
* if "Where clause contains no SubQuery expressions" then
* -->[true] ===CONTINUE_FILTER_PROCESSING===
* else
* -->[false] "extract SubQuery expressions\n from Where clause"
* if "this is a nested SubQuery or \nthere are more than 1 SubQuery expressions" then
* -->[yes] "throw Unsupported Error"
* else
* --> "Rewrite Search condition to \nremove SubQuery predicate"
* --> "build QBSubQuery"
* --> "extract correlated predicates \nfrom Where Clause"
* --> "add correlated Items to \nSelect List and Group By"
* --> "construct Join Predicate \nfrom correlation predicates"
* --> "Generate Plan for\n modified SubQuery"
* --> "Build the Join Condition\n for Parent Query to SubQuery join"
* --> "Build the QBJoinTree from the Join condition"
* --> "Update Parent Query Filter\n with any Post Join conditions"
* --> ===CONTINUE_FILTER_PROCESSING===
* endif
* endif
*
* Support for Sub Queries in Having Clause:
* - By and large this works the same way as SubQueries in the Where Clause.
* - The one addum is the handling of aggregation expressions from the Outer Query
* appearing in correlation clauses.
* - So such correlating predicates are allowed:
* min(OuterQuert.x) = SubQuery.y
* - this requires special handling when converting to joins. See QBSubQuery.rewrite
* method method for detailed comments.
*/
List<ASTNode> subQueriesInOriginalTree = SubQueryUtils.findSubQueries(searchCond);
if ( subQueriesInOriginalTree.size() > 0 ) {
/*
* Restriction.9.m :: disallow nested SubQuery expressions.
*/
if (qb.getSubQueryPredicateDef() != null ) {
throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
subQueriesInOriginalTree.get(0), "Nested SubQuery expressions are not supported."));
}
/*
* Restriction.8.m :: We allow only 1 SubQuery expression per Query.
*/
if (subQueriesInOriginalTree.size() > 1 ) {
throw new SemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
subQueriesInOriginalTree.get(1), "Only 1 SubQuery expression is supported."));
}
/*
* Clone the Search AST; apply all rewrites on the clone.
*/
ASTNode clonedSearchCond = (ASTNode) SubQueryUtils.adaptor.dupTree(searchCond);
List<ASTNode> subQueries = SubQueryUtils.findSubQueries(clonedSearchCond);
for(int i=0; i < subQueries.size(); i++) {
ASTNode subQueryAST = subQueries.get(i);
ASTNode originalSubQueryAST = subQueriesInOriginalTree.get(i);
int sqIdx = qb.incrNumSubQueryPredicates();
clonedSearchCond = SubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST);
QBSubQuery subQuery = SubQueryUtils.buildSubQuery(qb.getId(),
sqIdx, subQueryAST, originalSubQueryAST, ctx);
if ( !forHavingClause ) {
qb.setWhereClauseSubQueryPredicate(subQuery);
} else {
qb.setHavingClauseSubQueryPredicate(subQuery);
}
String havingInputAlias = null;
if ( forHavingClause ) {
havingInputAlias = "gby_sq" + sqIdx;
aliasToOpInfo.put(havingInputAlias, input);
}
subQuery.validateAndRewriteAST(inputRR, forHavingClause, havingInputAlias, aliasToOpInfo.keySet());
QB qbSQ = new QB(subQuery.getOuterQueryId(), subQuery.getAlias(), true);
Operator sqPlanTopOp = genPlanForSubQueryPredicate(qbSQ, subQuery);
aliasToOpInfo.put(subQuery.getAlias(), sqPlanTopOp);
RowResolver sqRR = opParseCtx.get(sqPlanTopOp).getRowResolver();
/*
* Check.5.h :: For In and Not In the SubQuery must implicitly or
* explicitly only contain one select item.
*/
if ( subQuery.getOperator().getType() != SubQueryType.EXISTS &&
subQuery.getOperator().getType() != SubQueryType.NOT_EXISTS &&
sqRR.getColumnInfos().size() -
subQuery.getNumOfCorrelationExprsAddedToSQSelect() > 1 ) {
throw new SemanticException(ErrorMsg.INVALID_SUBQUERY_EXPRESSION.getMsg(
subQueryAST, "SubQuery can contain only 1 item in Select List."));
}
/*
* If this is a Not In SubQuery Predicate then Join in the Null Check SubQuery.
* See QBSubQuery.NotInCheck for details on why and how this is constructed.
*/
if ( subQuery.getNotInCheck() != null ) {
QBSubQuery.NotInCheck notInCheck = subQuery.getNotInCheck();
notInCheck.setSQRR(sqRR);
QB qbSQ_nic = new QB(subQuery.getOuterQueryId(), notInCheck.getAlias(), true);
Operator sqnicPlanTopOp = genPlanForSubQueryPredicate(qbSQ_nic, notInCheck);
aliasToOpInfo.put(notInCheck.getAlias(), sqnicPlanTopOp);
QBJoinTree joinTree_nic = genSQJoinTree(qb, notInCheck,
input,
aliasToOpInfo);
pushJoinFilters(qb, joinTree_nic, aliasToOpInfo, false);
input = genJoinOperator(qbSQ_nic, joinTree_nic, aliasToOpInfo, input);
inputRR = opParseCtx.get(input).getRowResolver();
if ( forHavingClause ) {
aliasToOpInfo.put(havingInputAlias, input);
}
}
/*
* Gen Join between outer Operator and SQ op
*/
subQuery.buildJoinCondition(inputRR, sqRR, forHavingClause, havingInputAlias);
QBJoinTree joinTree = genSQJoinTree(qb, subQuery,
input,
aliasToOpInfo);
/*
* push filters only for this QBJoinTree. Child QBJoinTrees have already been handled.
*/
pushJoinFilters(qb, joinTree, aliasToOpInfo, false);
input = genJoinOperator(qbSQ, joinTree, aliasToOpInfo, input);
searchCond = subQuery.updateOuterQueryFilter(clonedSearchCond);
}
}
return genFilterPlan(qb, searchCond, input);
}
/**
 * Creates a FilterOperator on top of {@code input} that evaluates the given
 * condition; the output row schema is identical to the input's.
 *
 * @param qb
 *          current query block (used only for debug logging)
 * @param condn
 *          the filter condition AST to compile
 * @param input
 *          the operator producing the rows to be filtered
 * @return the new filter operator, registered in opParseCtx
 */
@SuppressWarnings("nls")
private Operator genFilterPlan(QB qb, ASTNode condn, Operator input)
    throws SemanticException {
  RowResolver rowResolver = opParseCtx.get(input).getRowResolver();
  // Compile the condition against the input's row resolver; the filter
  // passes rows through unchanged, so reuse the input schema and resolver.
  ExprNodeDesc filterCond = genExprNodeDesc(condn, rowResolver);
  FilterDesc filterDesc = new FilterDesc(filterCond, false);
  Operator filterOp = putOpInsertMap(
      OperatorFactory.getAndMakeChild(filterDesc,
          new RowSchema(rowResolver.getColumnInfos()), input),
      rowResolver);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: "
        + rowResolver.toString());
  }
  return filterOp;
}
/*
 * For inner joins, push an 'is not null' filter onto the join source for
 * every join key that is not a null-safe (<=>) comparison: NULL keys can
 * never match, so filtering them early is safe and cheaper.
 */
private Operator genNotNullFilterForJoinSourcePlan(QB qb, Operator input,
    QBJoinTree joinTree, ExprNodeDesc[] joinKeys) throws SemanticException {
  // Nothing to do without a query block/join tree, when outer joins are
  // involved (NULL rows must survive), or when there are no join keys.
  if (qb == null || joinTree == null
      || !joinTree.getNoOuterJoin()
      || joinKeys == null || joinKeys.length == 0) {
    return input;
  }
  List<Boolean> nullSafes = joinTree.getNullSafes();
  ExprNodeDesc combinedPred = null;
  for (int idx = 0; idx < joinKeys.length; idx++) {
    if (nullSafes.get(idx)) {
      // Null-safe (<=>) comparisons match NULLs — no filter for this key.
      continue;
    }
    List<ExprNodeDesc> isNotNullArgs = new ArrayList<ExprNodeDesc>();
    isNotNullArgs.add(joinKeys[idx]);
    ExprNodeDesc isNotNull = ExprNodeGenericFuncDesc.newInstance(
        FunctionRegistry.getFunctionInfo("isnotnull").getGenericUDF(),
        isNotNullArgs);
    // AND the per-key checks together into one predicate.
    combinedPred = (combinedPred == null)
        ? isNotNull
        : ExprNodeDescUtils.mergePredicates(combinedPred, isNotNull);
  }
  if (combinedPred == null) {
    // Every key was null-safe; no filter needed.
    return input;
  }
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new FilterDesc(combinedPred, false),
      new RowSchema(inputRR.getColumnInfos()), input), inputRR);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Filter Plan for " + qb.getId() + " row schema: "
        + inputRR.toString());
  }
  return output;
}
/**
 * Expands a column regex (the "*" of "select *" / "select tab.*", or a
 * quoted regex column name) against the input RowResolver: appends one
 * column expression per match to {@code col_list} and registers the
 * corresponding output columns in {@code output}.
 *
 * @param colRegex the pattern to match column names against (compiled
 *        case-insensitively)
 * @param tabAlias table alias to restrict the expansion to, or null for all
 * @param sel the select-expression AST node (for error reporting and unparse
 *        translation)
 * @param col_list receives an ExprNodeColumnDesc for each matched column
 * @param input resolver supplying the candidate columns
 * @param pos starting output column position
 * @param output resolver in which the produced columns are registered
 * @param aliases table aliases, iterated in the order they appear in the query
 * @param subQuery when true, reject ambiguous column names in the output
 * @return the next unused output column position
 * @throws SemanticException if the table alias is unknown, the regex is
 *         invalid, or no column matched
 */
@SuppressWarnings("nls")
private Integer genColListRegex(String colRegex, String tabAlias,
    ASTNode sel, ArrayList<ExprNodeDesc> col_list,
    RowResolver input, Integer pos, RowResolver output, List<String> aliases, boolean subQuery)
    throws SemanticException {
  // The table alias should exist
  if (tabAlias != null && !input.hasTableAlias(tabAlias)) {
    throw new SemanticException(ErrorMsg.INVALID_TABLE_ALIAS.getMsg(sel));
  }
  // TODO: Have to put in the support for AS clause
  Pattern regex = null;
  try {
    regex = Pattern.compile(colRegex, Pattern.CASE_INSENSITIVE);
  } catch (PatternSyntaxException e) {
    throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel, e
        .getMessage()));
  }
  StringBuilder replacementText = new StringBuilder();
  int matched = 0;
  // add empty string to the list of aliases. Some operators (ex. GroupBy) add
  // ColumnInfos for table alias "".
  if (!aliases.contains("")) {
    aliases.add("");
  }
  /*
   * track the input ColumnInfos that are added to the output.
   * if a columnInfo has multiple mappings; then add the column only once,
   * but carry the mappings forward.
   */
  Map<ColumnInfo, ColumnInfo> inputColsProcessed = new HashMap<ColumnInfo, ColumnInfo>();
  // For expr "*", aliases should be iterated in the order they are specified
  // in the query.
  for (String alias : aliases) {
    HashMap<String, ColumnInfo> fMap = input.getFieldMap(alias);
    if (fMap == null) {
      continue;
    }
    // For the tab.* case, add all the columns to the fieldList
    // from the input schema
    for (Map.Entry<String, ColumnInfo> entry : fMap.entrySet()) {
      ColumnInfo colInfo = entry.getValue();
      String name = colInfo.getInternalName();
      String[] tmp = input.reverseLookup(name);
      // Skip the colinfos which are not for this particular alias
      if (tabAlias != null && !tmp[0].equalsIgnoreCase(tabAlias)) {
        continue;
      }
      // Hidden virtual columns are internal bookkeeping and never match "*".
      if (colInfo.getIsVirtualCol() && colInfo.isHiddenVirtualCol()) {
        continue;
      }
      // Not matching the regex?
      if (!regex.matcher(tmp[1]).matches()) {
        continue;
      }
      if (subQuery) {
        output.checkColumn(tmp[0], tmp[1]);
      }
      ColumnInfo oColInfo = inputColsProcessed.get(colInfo);
      if (oColInfo == null) {
        // First time this input column matched: emit its expression and
        // create the output ColumnInfo it maps to.
        ExprNodeColumnDesc expr = new ExprNodeColumnDesc(colInfo.getType(),
            name, colInfo.getTabAlias(), colInfo.getIsVirtualCol(),
            colInfo.isSkewedCol());
        col_list.add(expr);
        oColInfo = new ColumnInfo(getColumnInternalName(pos),
            colInfo.getType(), colInfo.getTabAlias(),
            colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
        inputColsProcessed.put(colInfo, oColInfo);
      }
      output.put(tmp[0], tmp[1], oColInfo);
      pos = Integer.valueOf(pos.intValue() + 1);
      matched++;
      // Record the expanded "alias.col, ..." text so the "*" can be
      // rewritten when unparse translation is enabled (e.g. view creation).
      if (unparseTranslator.isEnabled()) {
        if (replacementText.length() > 0) {
          replacementText.append(", ");
        }
        replacementText.append(HiveUtils.unparseIdentifier(tmp[0], conf));
        replacementText.append(".");
        replacementText.append(HiveUtils.unparseIdentifier(tmp[1], conf));
      }
    }
  }
  if (matched == 0) {
    throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel));
  }
  if (unparseTranslator.isEnabled()) {
    unparseTranslator.addTranslation(sel, replacementText.toString());
  }
  return pos;
}
/**
 * Returns the generated internal column name for a column position,
 * delegating to {@link HiveConf#getColumnInternalName(int)}.
 */
public static String getColumnInternalName(int pos) {
  return HiveConf.getColumnInternalName(pos);
}
/**
 * Extracts the program name — the first space-delimited token — from a
 * script command line.
 */
private String getScriptProgName(String cmd) {
  int firstSpace = cmd.indexOf(" ");
  if (firstSpace == -1) {
    // No arguments: the whole command is the program name.
    return cmd;
  }
  return cmd.substring(0, firstSpace);
}
/**
 * Returns everything from the first space onward (the script's arguments,
 * including the leading space so it can be concatenated directly after the
 * program name), or "" when the command has no arguments.
 */
private String getScriptArgs(String cmd) {
  int firstSpace = cmd.indexOf(" ");
  return (firstSpace == -1) ? "" : cmd.substring(firstSpace);
}
/**
 * Inverse of {@link #getColumnInternalName(int)}: parses the column
 * position back out of a generated internal column name.
 */
private static int getPositionFromInternalName(String internalName) {
  return HiveConf.getPositionFromInternalName(internalName);
}
/**
 * If the script's program name refers to a downloadable resource (e.g. a
 * non-local path), registers it as a session FILE resource and rewrites the
 * command to use the downloaded file's base name; otherwise returns the
 * command unchanged.
 */
private String fetchFilesNotInLocalFilesystem(String cmd) {
  SessionState ss = SessionState.get();
  String progName = getScriptProgName(cmd);
  if (!SessionState.canDownloadResource(progName)) {
    // Already a local program: nothing to fetch.
    return cmd;
  }
  // Download/register the resource, then refer to it by file name only.
  String filePath = ss.add_resource(ResourceType.FILE, progName, true);
  String fileName = new Path(filePath).getName();
  return fileName + getScriptArgs(cmd);
}
/**
 * Builds a TableDesc from a script clause's row-format specification:
 * either an explicit SerDe (TOK_SERDENAME, optionally with properties) or
 * delimiter properties (TOK_SERDEPROPS).
 *
 * @param child TOK_SERDENAME or TOK_SERDEPROPS AST node
 * @param cols comma-separated column names
 * @param colTypes comma-separated column types
 * @param defaultCols whether the columns are the default ("key,value") set
 * @return the table descriptor, or null for an unexpected node type
 * @throws SemanticException if the SerDe class cannot be loaded or the line
 *         delimiter is not a newline
 */
private TableDesc getTableDescFromSerDe(ASTNode child, String cols,
    String colTypes, boolean defaultCols) throws SemanticException {
  if (child.getType() == HiveParser.TOK_SERDENAME) {
    String serdeName = unescapeSQLString(child.getChild(0).getText());
    Class<? extends Deserializer> serdeClass = null;
    try {
      serdeClass = (Class<? extends Deserializer>) Class.forName(serdeName,
          true, JavaUtils.getClassLoader());
    } catch (ClassNotFoundException e) {
      throw new SemanticException(e);
    }
    TableDesc tblDesc = PlanUtils.getTableDesc(serdeClass, Integer
        .toString(Utilities.tabCode), cols, colTypes, defaultCols);
    // copy all the SERDEPROPERTIES key/value pairs, if present
    if (child.getChildCount() == 2) {
      ASTNode prop = (ASTNode) ((ASTNode) child.getChild(1)).getChild(0);
      for (int propChild = 0; propChild < prop.getChildCount(); propChild++) {
        String key = unescapeSQLString(prop.getChild(propChild).getChild(0)
            .getText());
        String value = unescapeSQLString(prop.getChild(propChild).getChild(1)
            .getText());
        tblDesc.getProperties().setProperty(key, value);
      }
    }
    return tblDesc;
  } else if (child.getType() == HiveParser.TOK_SERDEPROPS) {
    TableDesc tblDesc = PlanUtils.getDefaultTableDesc(Integer
        .toString(Utilities.ctrlaCode), cols, colTypes, defaultCols);
    int numChildRowFormat = child.getChildCount();
    for (int numC = 0; numC < numChildRowFormat; numC++) {
      ASTNode rowChild = (ASTNode) child.getChild(numC);
      switch (rowChild.getToken().getType()) {
      case HiveParser.TOK_TABLEROWFORMATFIELD:
        String fieldDelim = unescapeSQLString(rowChild.getChild(0).getText());
        tblDesc.getProperties()
            .setProperty(serdeConstants.FIELD_DELIM, fieldDelim);
        tblDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_FORMAT,
            fieldDelim);
        if (rowChild.getChildCount() >= 2) {
          String fieldEscape = unescapeSQLString(rowChild.getChild(1)
              .getText());
          tblDesc.getProperties().setProperty(serdeConstants.ESCAPE_CHAR,
              fieldEscape);
        }
        break;
      case HiveParser.TOK_TABLEROWFORMATCOLLITEMS:
        tblDesc.getProperties().setProperty(serdeConstants.COLLECTION_DELIM,
            unescapeSQLString(rowChild.getChild(0).getText()));
        break;
      case HiveParser.TOK_TABLEROWFORMATMAPKEYS:
        tblDesc.getProperties().setProperty(serdeConstants.MAPKEY_DELIM,
            unescapeSQLString(rowChild.getChild(0).getText()));
        break;
      case HiveParser.TOK_TABLEROWFORMATLINES:
        String lineDelim = unescapeSQLString(rowChild.getChild(0).getText());
        tblDesc.getProperties().setProperty(serdeConstants.LINE_DELIM, lineDelim);
        if (!lineDelim.equals("\n") && !lineDelim.equals("10")) {
          throw new SemanticException(generateErrorMessage(rowChild,
              ErrorMsg.LINES_TERMINATED_BY_NON_NEWLINE.getMsg()));
        }
        // Bug fix: this case previously fell through into
        // TOK_TABLEROWFORMATNULL, overwriting the null format with the
        // line delimiter.
        break;
      case HiveParser.TOK_TABLEROWFORMATNULL:
        String nullFormat = unescapeSQLString(rowChild.getChild(0).getText());
        tblDesc.getProperties().setProperty(serdeConstants.SERIALIZATION_NULL_FORMAT,
            nullFormat);
        break;
      default:
        assert false;
      }
    }
    return tblDesc;
  }
  // should never come here
  return null;
}
/**
 * Registers {@code name} in {@code nameSet}, failing when the alias has
 * already been seen.
 *
 * @throws SemanticException if the column alias is a duplicate
 */
private void failIfColAliasExists(Set<String> nameSet, String name)
    throws SemanticException {
  // Set.add returns false iff the element was already present.
  if (!nameSet.add(name)) {
    throw new SemanticException(ErrorMsg.COLUMN_ALIAS_ALREADY_EXISTS
        .getMsg(name));
  }
}
/**
 * Generates the plan for a SELECT TRANSFORM / MAP / REDUCE clause: a
 * ScriptOperator that pipes the input rows through an external script.
 *
 * TRANSFORM AST child layout (positions referenced by the local *Num/Pos
 * variables below): 0 = input expression list, 1 = input SerDe,
 * 2 = input record writer, 3 = the script command, 4 = output SerDe,
 * 5 = output record reader, 6 = optional output column/alias list.
 *
 * @param trfm the TOK_TRANSFORM AST node
 * @param qb current query block
 * @param input operator feeding rows to the script
 * @return the new ScriptOperator
 */
@SuppressWarnings("nls")
private Operator genScriptPlan(ASTNode trfm, QB qb, Operator input)
    throws SemanticException {
  // If there is no "AS" clause, the output schema will be "key,value"
  ArrayList<ColumnInfo> outputCols = new ArrayList<ColumnInfo>();
  // Child positions within the TRANSFORM node (see javadoc above).
  int inputSerDeNum = 1, inputRecordWriterNum = 2;
  int outputSerDeNum = 4, outputRecordReaderNum = 5;
  int outputColsNum = 6;
  boolean outputColNames = false, outputColSchemas = false;
  int execPos = 3;
  boolean defaultOutputCols = false;
  // Go over all the children
  if (trfm.getChildCount() > outputColsNum) {
    ASTNode outCols = (ASTNode) trfm.getChild(outputColsNum);
    if (outCols.getType() == HiveParser.TOK_ALIASLIST) {
      // AS (a, b, ...) — names only; types default to string.
      outputColNames = true;
    } else if (outCols.getType() == HiveParser.TOK_TABCOLLIST) {
      // AS (a INT, b STRING, ...) — names with explicit types.
      outputColSchemas = true;
    }
  }
  // If column type is not specified, use a string
  if (!outputColNames && !outputColSchemas) {
    // Default schema: two string columns named "key" and "value".
    String intName = getColumnInternalName(0);
    ColumnInfo colInfo = new ColumnInfo(intName,
        TypeInfoFactory.stringTypeInfo, null, false);
    colInfo.setAlias("key");
    outputCols.add(colInfo);
    intName = getColumnInternalName(1);
    colInfo = new ColumnInfo(intName, TypeInfoFactory.stringTypeInfo, null,
        false);
    colInfo.setAlias("value");
    outputCols.add(colInfo);
    defaultOutputCols = true;
  } else {
    ASTNode collist = (ASTNode) trfm.getChild(outputColsNum);
    int ccount = collist.getChildCount();
    Set<String> colAliasNamesDuplicateCheck = new HashSet<String>();
    if (outputColNames) {
      // Name-only aliases: every output column is a string.
      for (int i = 0; i < ccount; ++i) {
        String colAlias = unescapeIdentifier(((ASTNode) collist.getChild(i))
            .getText());
        failIfColAliasExists(colAliasNamesDuplicateCheck, colAlias);
        String intName = getColumnInternalName(i);
        ColumnInfo colInfo = new ColumnInfo(intName,
            TypeInfoFactory.stringTypeInfo, null, false);
        colInfo.setAlias(colAlias);
        outputCols.add(colInfo);
      }
    } else {
      // Name + type pairs: parse each declared column type.
      for (int i = 0; i < ccount; ++i) {
        ASTNode child = (ASTNode) collist.getChild(i);
        assert child.getType() == HiveParser.TOK_TABCOL;
        String colAlias = unescapeIdentifier(((ASTNode) child.getChild(0))
            .getText());
        failIfColAliasExists(colAliasNamesDuplicateCheck, colAlias);
        String intName = getColumnInternalName(i);
        ColumnInfo colInfo = new ColumnInfo(intName, TypeInfoUtils
            .getTypeInfoFromTypeString(getTypeStringFromAST((ASTNode) child
                .getChild(1))), null, false);
        colInfo.setAlias(colAlias);
        outputCols.add(colInfo);
      }
    }
  }
  // Build the output row resolver plus the comma-separated column-name and
  // column-type strings handed to the output SerDe.
  RowResolver out_rwsch = new RowResolver();
  StringBuilder columns = new StringBuilder();
  StringBuilder columnTypes = new StringBuilder();
  for (int i = 0; i < outputCols.size(); ++i) {
    if (i != 0) {
      columns.append(",");
      columnTypes.append(",");
    }
    columns.append(outputCols.get(i).getInternalName());
    columnTypes.append(outputCols.get(i).getType().getTypeName());
    out_rwsch.put(qb.getParseInfo().getAlias(), outputCols.get(i).getAlias(),
        outputCols.get(i));
  }
  // Same for the input side, taken from the input operator's schema.
  StringBuilder inpColumns = new StringBuilder();
  StringBuilder inpColumnTypes = new StringBuilder();
  ArrayList<ColumnInfo> inputSchema = opParseCtx.get(input).getRowResolver()
      .getColumnInfos();
  for (int i = 0; i < inputSchema.size(); ++i) {
    if (i != 0) {
      inpColumns.append(",");
      inpColumnTypes.append(",");
    }
    inpColumns.append(inputSchema.get(i).getInternalName());
    inpColumnTypes.append(inputSchema.get(i).getType().getTypeName());
  }
  TableDesc outInfo;
  TableDesc errInfo;
  TableDesc inInfo;
  // The default script SerDe, used when no explicit row format is given.
  String defaultSerdeName = conf.getVar(HiveConf.ConfVars.HIVESCRIPTSERDE);
  Class<? extends Deserializer> serde;
  try {
    serde = (Class<? extends Deserializer>) Class.forName(defaultSerdeName,
        true, JavaUtils.getClassLoader());
  } catch (ClassNotFoundException e) {
    throw new SemanticException(e);
  }
  int fieldSeparator = Utilities.tabCode;
  if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESCRIPTESCAPE)) {
    // With escaping enabled, Ctrl-A replaces tab as the field separator.
    fieldSeparator = Utilities.ctrlaCode;
  }
  // Input and Output Serdes
  if (trfm.getChild(inputSerDeNum).getChildCount() > 0) {
    // User-specified input row format / SerDe.
    inInfo = getTableDescFromSerDe((ASTNode) (((ASTNode) trfm
        .getChild(inputSerDeNum))).getChild(0), inpColumns.toString(),
        inpColumnTypes.toString(), false);
  } else {
    inInfo = PlanUtils.getTableDesc(serde, Integer
        .toString(fieldSeparator), inpColumns.toString(), inpColumnTypes
        .toString(), false, true);
  }
  if (trfm.getChild(outputSerDeNum).getChildCount() > 0) {
    // User-specified output row format / SerDe.
    outInfo = getTableDescFromSerDe((ASTNode) (((ASTNode) trfm
        .getChild(outputSerDeNum))).getChild(0), columns.toString(),
        columnTypes.toString(), false);
    // This is for backward compatibility. If the user did not specify the
    // output column list, we assume that there are 2 columns: key and value.
    // However, if the script outputs: col1, col2, col3 separated by TAB, the
    // requirement is: key is col and value is (col2 TAB col3)
  } else {
    outInfo = PlanUtils.getTableDesc(serde, Integer
        .toString(fieldSeparator), columns.toString(), columnTypes
        .toString(), defaultOutputCols);
  }
  // Error stream always uses the default serde with a single column
  errInfo = PlanUtils.getTableDesc(serde, Integer.toString(Utilities.tabCode), "KEY");
  // Output record readers
  Class<? extends RecordReader> outRecordReader = getRecordReader((ASTNode) trfm
      .getChild(outputRecordReaderNum));
  Class<? extends RecordWriter> inRecordWriter = getRecordWriter((ASTNode) trfm
      .getChild(inputRecordWriterNum));
  Class<? extends RecordReader> errRecordReader = getDefaultRecordReader();
  // Assemble the ScriptOperator; the command may be rewritten to reference
  // a downloaded resource (see fetchFilesNotInLocalFilesystem).
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new ScriptDesc(
          fetchFilesNotInLocalFilesystem(stripQuotes(trfm.getChild(execPos).getText())),
          inInfo, inRecordWriter, outInfo, outRecordReader, errRecordReader, errInfo),
      new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch);
  output.setColumnExprMap(new HashMap<String, ExprNodeDesc>()); // disable backtracking
  return output;
}
/**
 * Resolves the record-reader class for a script's output stream: the class
 * named in the AST when present, otherwise the configured default.
 */
private Class<? extends RecordReader> getRecordReader(ASTNode node)
    throws SemanticException {
  String className = (node.getChildCount() == 0)
      ? conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDREADER)
      : unescapeSQLString(node.getChild(0).getText());
  try {
    return (Class<? extends RecordReader>) Class.forName(className, true,
        JavaUtils.getClassLoader());
  } catch (ClassNotFoundException e) {
    throw new SemanticException(e);
  }
}
/**
 * Resolves the default record-reader class from configuration (used for the
 * script's error stream).
 */
private Class<? extends RecordReader> getDefaultRecordReader()
    throws SemanticException {
  String className = conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDREADER);
  try {
    return (Class<? extends RecordReader>) Class.forName(className, true,
        JavaUtils.getClassLoader());
  } catch (ClassNotFoundException e) {
    throw new SemanticException(e);
  }
}
/**
 * Resolves the record-writer class for a script's input stream: the class
 * named in the AST when present, otherwise the configured default.
 */
private Class<? extends RecordWriter> getRecordWriter(ASTNode node)
    throws SemanticException {
  String className = (node.getChildCount() == 0)
      ? conf.getVar(HiveConf.ConfVars.HIVESCRIPTRECORDWRITER)
      : unescapeSQLString(node.getChild(0).getText());
  try {
    return (Class<? extends RecordWriter>) Class.forName(className, true,
        JavaUtils.getClassLoader());
  } catch (ClassNotFoundException e) {
    throw new SemanticException(e);
  }
}
/**
 * Grouping-set bitmaps for ROLLUP over {@code size} keys: size+1 prefix
 * masks with the low i bits set — 0, 1, 3, 7, ..., 2^size - 1.
 */
private List<Integer> getGroupingSetsForRollup(int size) {
  List<Integer> bitmaps = new ArrayList<Integer>(size + 1);
  int mask = 0;
  for (int i = 0; i <= size; i++) {
    bitmaps.add(mask);
    // Extend the prefix by one more key: shift in a low 1-bit.
    mask = (mask << 1) | 1;
  }
  return bitmaps;
}
/**
 * Grouping-set bitmaps for CUBE over {@code size} keys: every subset of the
 * keys, i.e. all 2^size bitmaps 0 .. 2^size - 1.
 */
private List<Integer> getGroupingSetsForCube(int size) {
  int total = 1 << size;
  List<Integer> bitmaps = new ArrayList<Integer>(total);
  for (int bitmap = 0; bitmap < total; ++bitmap) {
    bitmaps.add(bitmap);
  }
  return bitmaps;
}
/**
 * Returns the group-by expressions for {@code dest} together with their
 * grouping-set bitmaps. ROLLUP and CUBE clauses are expanded into explicit
 * grouping sets here; a plain GROUP BY yields an empty set list.
 */
private ObjectPair<List<ASTNode>, List<Integer>> getGroupByGroupingSetsForClause(
    QBParseInfo parseInfo, String dest) throws SemanticException {
  List<ASTNode> groupByExprs = getGroupByForClause(parseInfo, dest);
  List<Integer> groupingSets;
  if (parseInfo.getDestRollups().contains(dest)) {
    // ROLLUP: every prefix of the group-by keys, plus the empty set.
    groupingSets = getGroupingSetsForRollup(groupByExprs.size());
  } else if (parseInfo.getDestCubes().contains(dest)) {
    // CUBE: every subset of the group-by keys.
    groupingSets = getGroupingSetsForCube(groupByExprs.size());
  } else if (parseInfo.getDestGroupingSets().contains(dest)) {
    // Explicit GROUPING SETS clause.
    groupingSets = getGroupingSets(groupByExprs, parseInfo, dest);
  } else {
    groupingSets = new ArrayList<Integer>();
  }
  return new ObjectPair<List<ASTNode>, List<Integer>>(groupByExprs, groupingSets);
}
/**
 * Computes the bitmaps for an explicit GROUPING SETS clause. Each
 * TOK_GROUPING_SETS_EXPRESSION child of the group-by AST becomes one bitmap
 * in which bit i is set iff group-by expression i appears in that set.
 *
 * @throws SemanticException if a grouping-set expression is not among the
 *         group-by expressions, or if every set is empty (no aggregation)
 */
private List<Integer> getGroupingSets(List<ASTNode> groupByExpr, QBParseInfo parseInfo,
    String dest) throws SemanticException {
  // Map each group-by expression, keyed by its AST string form, to its
  // bit position.
  Map<String, Integer> exprPos = new HashMap<String, Integer>();
  for (int i = 0; i < groupByExpr.size(); ++i) {
    ASTNode node = groupByExpr.get(i);
    exprPos.put(node.toStringTree(), i);
  }
  ASTNode root = parseInfo.getGroupByForClause(dest);
  List<Integer> result = new ArrayList<Integer>(root == null ? 0 : root.getChildCount());
  if (root != null) {
    for (int i = 0; i < root.getChildCount(); ++i) {
      ASTNode child = (ASTNode) root.getChild(i);
      // Plain group-by keys also appear as children; only the grouping-set
      // expressions contribute bitmaps.
      if (child.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) {
        continue;
      }
      int bitmap = 0;
      for (int j = 0; j < child.getChildCount(); ++j) {
        String treeAsString = child.getChild(j).toStringTree();
        Integer pos = exprPos.get(treeAsString);
        if (pos == null) {
          // Every expression inside a grouping set must also be a
          // group-by expression.
          throw new SemanticException(
              generateErrorMessage((ASTNode) child.getChild(j),
                  ErrorMsg.HIVE_GROUPING_SETS_EXPR_NOT_IN_GROUPBY.getErrorCodedMsg()));
        }
        bitmap = setBit(bitmap, pos);
      }
      result.add(bitmap);
    }
  }
  // A clause whose grouping sets are all empty performs no aggregation.
  if (checkForNoAggr(result)) {
    throw new SemanticException(
        ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOFUNC.getMsg());
  }
  return result;
}
/**
 * Returns true when every grouping-set bitmap is empty, i.e. no grouping
 * key is referenced by any set (also true for an empty list).
 */
private boolean checkForNoAggr(List<Integer> bitmaps) {
  for (int mask : bitmaps) {
    if (mask != 0) {
      return false;
    }
  }
  return true;
}
/** Returns {@code bitmap} with bit {@code bitIdx} (0-based from the LSB) set. */
private int setBit(int bitmap, int bitIdx) {
  return bitmap | (1 << bitIdx);
}
/**
 * This function is a wrapper of parseInfo.getGroupByForClause which
 * automatically translates SELECT DISTINCT a,b,c to SELECT a,b,c GROUP BY
 * a,b,c.
 *
 * @return the group-by expression ASTs for the destination clause;
 *         grouping-set expressions themselves are excluded
 */
static List<ASTNode> getGroupByForClause(QBParseInfo parseInfo, String dest) {
  if (parseInfo.getSelForClause(dest).getToken().getType() == HiveParser.TOK_SELECTDI) {
    // SELECT DISTINCT: each select expression becomes a group-by key.
    ASTNode selectExprs = parseInfo.getSelForClause(dest);
    List<ASTNode> result = new ArrayList<ASTNode>(selectExprs == null ? 0
        : selectExprs.getChildCount());
    if (selectExprs != null) {
      HashMap<String, ASTNode> windowingExprs = parseInfo.getWindowingExprsForClause(dest);
      for (int i = 0; i < selectExprs.getChildCount(); ++i) {
        // Skip query hints — they are not expressions.
        if (((ASTNode) selectExprs.getChild(i)).getToken().getType() == HiveParser.TOK_HINTLIST) {
          continue;
        }
        // table.column AS alias
        ASTNode grpbyExpr = (ASTNode) selectExprs.getChild(i).getChild(0);
        /*
         * If this is handled by Windowing then ignore it.
         */
        if (windowingExprs != null && windowingExprs.containsKey(grpbyExpr.toStringTree())) {
          continue;
        }
        result.add(grpbyExpr);
      }
    }
    return result;
  } else {
    ASTNode grpByExprs = parseInfo.getGroupByForClause(dest);
    List<ASTNode> result = new ArrayList<ASTNode>(grpByExprs == null ? 0
        : grpByExprs.getChildCount());
    if (grpByExprs != null) {
      for (int i = 0; i < grpByExprs.getChildCount(); ++i) {
        ASTNode grpbyExpr = (ASTNode) grpByExprs.getChild(i);
        // Grouping-set expressions are handled separately (getGroupingSets).
        if (grpbyExpr.getType() != HiveParser.TOK_GROUPING_SETS_EXPRESSION) {
          result.add(grpbyExpr);
        }
      }
    }
    return result;
  }
}
/**
 * Derives the (table alias, column alias) pair for a select expression.
 * Priority: explicit AS alias; column name of a simple column reference;
 * trailing identifier of a DOT expression (with a validated table alias);
 * auto-generated name from the function text when {@code includeFuncName};
 * otherwise {@code defaultName + colNum}.
 *
 * @param selExpr the TOK_SELEXPR node
 * @param defaultName fallback alias prefix
 * @param inputRR used to validate a leading table alias in "xx.yy"
 * @param includeFuncName generate the alias from the function text for
 *        TOK_FUNCTION expressions
 * @param colNum position of the expression, appended to keep aliases unique
 * @return a two-element array: [0] = table alias (may be null),
 *         [1] = column alias (never null)
 */
private static String[] getColAlias(ASTNode selExpr, String defaultName,
    RowResolver inputRR, boolean includeFuncName, int colNum) {
  String colAlias = null;
  String tabAlias = null;
  String[] colRef = new String[2];
  //for queries with a windowing expressions, the selexpr may have a third child
  if (selExpr.getChildCount() == 2 ||
      (selExpr.getChildCount() == 3 &&
      selExpr.getChild(2).getType() == HiveParser.TOK_WINDOWSPEC)) {
    // return zz for "xx + yy AS zz"
    colAlias = unescapeIdentifier(selExpr.getChild(1).getText());
    colRef[0] = tabAlias;
    colRef[1] = colAlias;
    return colRef;
  }
  ASTNode root = (ASTNode) selExpr.getChild(0);
  if (root.getType() == HiveParser.TOK_TABLE_OR_COL) {
    // Simple column reference: the column name is the alias.
    colAlias =
        BaseSemanticAnalyzer.unescapeIdentifier(root.getChild(0).getText());
    colRef[0] = tabAlias;
    colRef[1] = colAlias;
    return colRef;
  }
  if (root.getType() == HiveParser.DOT) {
    ASTNode tab = (ASTNode) root.getChild(0);
    if (tab.getType() == HiveParser.TOK_TABLE_OR_COL) {
      String t = unescapeIdentifier(tab.getChild(0).getText());
      // Only record the table alias if the resolver actually knows it.
      if (inputRR.hasTableAlias(t)) {
        tabAlias = t;
      }
    }
    // Return zz for "xx.zz" and "xx.yy.zz"
    ASTNode col = (ASTNode) root.getChild(1);
    if (col.getType() == HiveParser.Identifier) {
      colAlias = unescapeIdentifier(col.getText());
    }
  }
  // if specified generate alias using func name
  if (includeFuncName && (root.getType() == HiveParser.TOK_FUNCTION)) {
    String expr_flattened = root.toStringTree();
    // remove all TOK tokens
    String expr_no_tok = expr_flattened.replaceAll("TOK_\\S+", "");
    // remove all non alphanumeric letters, replace whitespace spans with underscore
    String expr_formatted = expr_no_tok.replaceAll("\\W", " ").trim().replaceAll("\\s+", "_");
    // limit length to 20 chars
    if (expr_formatted.length() > AUTOGEN_COLALIAS_PRFX_MAXLENGTH) {
      expr_formatted = expr_formatted.substring(0, AUTOGEN_COLALIAS_PRFX_MAXLENGTH);
    }
    // append colnum to make it unique
    colAlias = expr_formatted.concat("_" + colNum);
  }
  if (colAlias == null) {
    // Return defaultName if selExpr is not a simple xx.yy.zz
    colAlias = defaultName + colNum;
  }
  colRef[0] = tabAlias;
  colRef[1] = colAlias;
  return colRef;
}
/**
 * Returns whether the pattern is a regex expression (instead of a normal
 * string). A normal string contains only letters, digits and "_". When
 * quoted-identifier support is set to "column", identifiers are never
 * treated as regexes.
 */
private static boolean isRegex(String pattern, HiveConf conf) {
  String qIdSupport = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_QUOTEDID_SUPPORT);
  if ("column".equals(qIdSupport)) {
    return false;
  }
  for (char c : pattern.toCharArray()) {
    if (!Character.isLetterOrDigit(c) && c != '_') {
      // Any non-identifier character marks the pattern as a regex.
      return true;
    }
  }
  return false;
}
/**
 * Generates the select plan for the given destination clause by delegating
 * to the AST-based overload with the clause's select-expression list.
 */
private Operator<?> genSelectPlan(String dest, QB qb, Operator<?> input)
    throws SemanticException {
  ASTNode selList = qb.getParseInfo().getSelForClause(dest);
  Operator<?> selectOp = genSelectPlan(selList, qb, input, false);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Select Plan for clause: " + dest);
  }
  return selectOp;
}
@SuppressWarnings("nls")
private Operator<?> genSelectPlan(ASTNode selExprList, QB qb,
Operator<?> input, boolean outerLV) throws SemanticException {
if (LOG.isDebugEnabled()) {
LOG.debug("tree: " + selExprList.toStringTree());
}
ArrayList<ExprNodeDesc> col_list = new ArrayList<ExprNodeDesc>();
RowResolver out_rwsch = new RowResolver();
ASTNode trfm = null;
Integer pos = Integer.valueOf(0);
RowResolver inputRR = opParseCtx.get(input).getRowResolver();
// SELECT * or SELECT TRANSFORM(*)
boolean selectStar = false;
int posn = 0;
boolean hintPresent = (selExprList.getChild(0).getType() == HiveParser.TOK_HINTLIST);
if (hintPresent) {
posn++;
}
boolean subQuery = qb.getParseInfo().getIsSubQ();
boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() ==
HiveParser.TOK_TRANSFORM);
if (isInTransform) {
queryProperties.setUsesScript(true);
globalLimitCtx.setHasTransformOrUDTF(true);
trfm = (ASTNode) selExprList.getChild(posn).getChild(0);
}
// Detect queries of the form SELECT udtf(col) AS ...
// by looking for a function as the first child, and then checking to see
// if the function is a Generic UDTF. It's not as clean as TRANSFORM due to
// the lack of a special token.
boolean isUDTF = false;
String udtfTableAlias = null;
ArrayList<String> udtfColAliases = new ArrayList<String>();
ASTNode udtfExpr = (ASTNode) selExprList.getChild(posn).getChild(0);
GenericUDTF genericUDTF = null;
int udtfExprType = udtfExpr.getType();
if (udtfExprType == HiveParser.TOK_FUNCTION
|| udtfExprType == HiveParser.TOK_FUNCTIONSTAR) {
String funcName = TypeCheckProcFactory.DefaultExprProcessor
.getFunctionText(udtfExpr, true);
FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);
if (fi != null) {
genericUDTF = fi.getGenericUDTF();
}
isUDTF = (genericUDTF != null);
if (isUDTF) {
globalLimitCtx.setHasTransformOrUDTF(true);
}
if (isUDTF && !fi.isNative()) {
unparseTranslator.addIdentifierTranslation((ASTNode) udtfExpr
.getChild(0));
}
if (isUDTF && (selectStar = udtfExprType == HiveParser.TOK_FUNCTIONSTAR)) {
genColListRegex(".*", null, (ASTNode) udtfExpr.getChild(0),
col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
}
}
if (isUDTF) {
// Only support a single expression when it's a UDTF
if (selExprList.getChildCount() > 1) {
throw new SemanticException(generateErrorMessage(
(ASTNode) selExprList.getChild(1),
ErrorMsg.UDTF_MULTIPLE_EXPR.getMsg()));
}
ASTNode selExpr = (ASTNode) selExprList.getChild(posn);
// Get the column / table aliases from the expression. Start from 1 as
// 0 is the TOK_FUNCTION
// column names also can be inferred from result of UDTF
for (int i = 1; i < selExpr.getChildCount(); i++) {
ASTNode selExprChild = (ASTNode) selExpr.getChild(i);
switch (selExprChild.getType()) {
case HiveParser.Identifier:
udtfColAliases.add(unescapeIdentifier(selExprChild.getText()));
unparseTranslator.addIdentifierTranslation(selExprChild);
break;
case HiveParser.TOK_TABALIAS:
assert (selExprChild.getChildCount() == 1);
udtfTableAlias = unescapeIdentifier(selExprChild.getChild(0)
.getText());
qb.addAlias(udtfTableAlias);
unparseTranslator.addIdentifierTranslation((ASTNode) selExprChild
.getChild(0));
break;
default:
assert (false);
}
}
if (LOG.isDebugEnabled()) {
LOG.debug("UDTF table alias is " + udtfTableAlias);
LOG.debug("UDTF col aliases are " + udtfColAliases);
}
}
// The list of expressions after SELECT or SELECT TRANSFORM.
ASTNode exprList;
if (isInTransform) {
exprList = (ASTNode) trfm.getChild(0);
} else if (isUDTF) {
exprList = udtfExpr;
} else {
exprList = selExprList;
}
if (LOG.isDebugEnabled()) {
LOG.debug("genSelectPlan: input = " + inputRR.toString());
}
// For UDTF's, skip the function name to get the expressions
int startPosn = isUDTF ? posn + 1 : posn;
if (isInTransform) {
startPosn = 0;
}
Set<String> colAliases = new HashSet<String>();
ASTNode[] exprs = new ASTNode[exprList.getChildCount()];
String[][] aliases = new String[exprList.getChildCount()][];
boolean[] hasAsClauses = new boolean[exprList.getChildCount()];
// Iterate over all expression (either after SELECT, or in SELECT TRANSFORM)
for (int i = startPosn; i < exprList.getChildCount(); ++i) {
// child can be EXPR AS ALIAS, or EXPR.
ASTNode child = (ASTNode) exprList.getChild(i);
boolean hasAsClause = (!isInTransform) && (child.getChildCount() == 2);
boolean isWindowSpec = child.getChildCount() == 3 &&
child.getChild(2).getType() == HiveParser.TOK_WINDOWSPEC;
// EXPR AS (ALIAS,...) parses, but is only allowed for UDTF's
// This check is not needed and invalid when there is a transform b/c the
// AST's are slightly different.
if (!isWindowSpec && !isInTransform && !isUDTF && child.getChildCount() > 2) {
throw new SemanticException(generateErrorMessage(
(ASTNode) child.getChild(2),
ErrorMsg.INVALID_AS.getMsg()));
}
// The real expression
ASTNode expr;
String tabAlias;
String colAlias;
if (isInTransform || isUDTF) {
tabAlias = null;
colAlias = autogenColAliasPrfxLbl + i;
expr = child;
} else {
// Get rid of TOK_SELEXPR
expr = (ASTNode) child.getChild(0);
String[] colRef = getColAlias(child, autogenColAliasPrfxLbl, inputRR,
autogenColAliasPrfxIncludeFuncName, i);
tabAlias = colRef[0];
colAlias = colRef[1];
if (hasAsClause) {
unparseTranslator.addIdentifierTranslation((ASTNode) child
.getChild(1));
}
}
exprs[i] = expr;
aliases[i] = new String[] {tabAlias, colAlias};
hasAsClauses[i] = hasAsClause;
colAliases.add(colAlias);
}
// Iterate over all expression (either after SELECT, or in SELECT TRANSFORM)
for (int i = startPosn; i < exprList.getChildCount(); ++i) {
// The real expression
ASTNode expr = exprs[i];
String tabAlias = aliases[i][0];
String colAlias = aliases[i][1];
boolean hasAsClause = hasAsClauses[i];
if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
pos = genColListRegex(".*", expr.getChildCount() == 0 ? null
: getUnescapedName((ASTNode) expr.getChild(0)).toLowerCase(),
expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
selectStar = true;
} else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause
&& !inputRR.getIsExprResolver()
&& isRegex(unescapeIdentifier(expr.getChild(0).getText()), conf)) {
// In case the expression is a regex COL.
// This can only happen without AS clause
// We don't allow this for ExprResolver - the Group By case
pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
null, expr, col_list, inputRR, pos, out_rwsch, qb.getAliases(), subQuery);
} else if (expr.getType() == HiveParser.DOT
&& expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
&& inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
.getChild(0).getText().toLowerCase())) && !hasAsClause
&& !inputRR.getIsExprResolver()
&& isRegex(unescapeIdentifier(expr.getChild(1).getText()), conf)) {
// In case the expression is TABLE.COL (col can be regex).
// This can only happen without AS clause
// We don't allow this for ExprResolver - the Group By case
pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
unescapeIdentifier(expr.getChild(0).getChild(0).getText()
.toLowerCase()), expr, col_list, inputRR, pos, out_rwsch,
qb.getAliases(), subQuery);
} else {
// Case when this is an expression
TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
// We allow stateful functions in the SELECT list (but nowhere else)
tcCtx.setAllowStatefulFunctions(true);
tcCtx.setAllowDistinctFunctions(false);
ExprNodeDesc exp = genExprNodeDesc(expr, inputRR, tcCtx);
String recommended = recommendName(exp, colAlias);
if (recommended != null && !colAliases.contains(recommended) &&
out_rwsch.get(null, recommended) == null) {
colAlias = recommended;
}
col_list.add(exp);
if (subQuery) {
out_rwsch.checkColumn(tabAlias, colAlias);
}
ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
exp.getWritableObjectInspector(), tabAlias, false);
colInfo.setSkewedCol((exp instanceof ExprNodeColumnDesc) ? ((ExprNodeColumnDesc) exp)
.isSkewedCol() : false);
out_rwsch.put(tabAlias, colAlias, colInfo);
if ( exp instanceof ExprNodeColumnDesc ) {
ExprNodeColumnDesc colExp = (ExprNodeColumnDesc) exp;
String[] altMapping = inputRR.getAlternateMappings(colExp.getColumn());
if ( altMapping != null ) {
out_rwsch.put(altMapping[0], altMapping[1], colInfo);
}
}
pos = Integer.valueOf(pos.intValue() + 1);
}
}
selectStar = selectStar && exprList.getChildCount() == posn + 1;
ArrayList<String> columnNames = new ArrayList<String>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
for (int i = 0; i < col_list.size(); i++) {
// Replace NULL with CAST(NULL AS STRING)
if (col_list.get(i) instanceof ExprNodeNullDesc) {
col_list.set(i, new ExprNodeConstantDesc(
TypeInfoFactory.stringTypeInfo, null));
}
String outputCol = getColumnInternalName(i);
colExprMap.put(outputCol, col_list.get(i));
columnNames.add(outputCol);
}
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
new SelectDesc(col_list, columnNames, selectStar), new RowSchema(
out_rwsch.getColumnInfos()), input), out_rwsch);
output.setColumnExprMap(colExprMap);
if (isInTransform) {
output = genScriptPlan(trfm, qb, output);
}
if (isUDTF) {
output = genUDTFPlan(genericUDTF, udtfTableAlias, udtfColAliases, qb,
output, outerLV);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Created Select Plan row schema: " + out_rwsch.toString());
}
return output;
}
private String recommendName(ExprNodeDesc exp, String colAlias) {
if (!colAlias.startsWith(autogenColAliasPrfxLbl)) {
return null;
}
String column = ExprNodeDescUtils.recommendInputName(exp);
if (column != null && !column.startsWith(autogenColAliasPrfxLbl)) {
return column;
}
return null;
}
  /**
   * Class to store GenericUDAF related information.
   * Bundles everything needed to build an AggregationDesc for one UDAF call.
   */
  static class GenericUDAFInfo {
    // Parameters after any type conversion (currently passed through as-is;
    // see the TODO in getGenericUDAFInfo).
    ArrayList<ExprNodeDesc> convertedParameters;
    // The evaluator resolved (and initialized) for this aggregation.
    GenericUDAFEvaluator genericUDAFEvaluator;
    // The return type reported by the evaluator's init() call.
    TypeInfo returnType;
  }
/**
* Convert exprNodeDesc array to Typeinfo array.
*/
static ArrayList<TypeInfo> getTypeInfo(ArrayList<ExprNodeDesc> exprs) {
ArrayList<TypeInfo> result = new ArrayList<TypeInfo>();
for (ExprNodeDesc expr : exprs) {
result.add(expr.getTypeInfo());
}
return result;
}
/**
* Convert exprNodeDesc array to ObjectInspector array.
*/
static ArrayList<ObjectInspector> getWritableObjectInspector(ArrayList<ExprNodeDesc> exprs) {
ArrayList<ObjectInspector> result = new ArrayList<ObjectInspector>();
for (ExprNodeDesc expr : exprs) {
result.add(expr.getWritableObjectInspector());
}
return result;
}
/**
* Convert exprNodeDesc array to Typeinfo array.
*/
static ObjectInspector[] getStandardObjectInspector(ArrayList<TypeInfo> exprs) {
ObjectInspector[] result = new ObjectInspector[exprs.size()];
for (int i = 0; i < exprs.size(); i++) {
result[i] = TypeInfoUtils
.getStandardWritableObjectInspectorFromTypeInfo(exprs.get(i));
}
return result;
}
/**
* Returns the GenericUDAFEvaluator for the aggregation. This is called once
* for each GroupBy aggregation.
*/
static GenericUDAFEvaluator getGenericUDAFEvaluator(String aggName,
ArrayList<ExprNodeDesc> aggParameters, ASTNode aggTree,
boolean isDistinct, boolean isAllColumns)
throws SemanticException {
ArrayList<ObjectInspector> originalParameterTypeInfos =
getWritableObjectInspector(aggParameters);
GenericUDAFEvaluator result = FunctionRegistry.getGenericUDAFEvaluator(
aggName, originalParameterTypeInfos, isDistinct, isAllColumns);
if (null == result) {
String reason = "Looking for UDAF Evaluator\"" + aggName
+ "\" with parameters " + originalParameterTypeInfos;
throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg(
(ASTNode) aggTree.getChild(0), reason));
}
return result;
}
/**
* Returns the GenericUDAFInfo struct for the aggregation.
*
* @param aggName
* The name of the UDAF.
* @param aggParameters
* The exprNodeDesc of the original parameters
* @param aggTree
* The ASTNode node of the UDAF in the query.
* @return GenericUDAFInfo
* @throws SemanticException
* when the UDAF is not found or has problems.
*/
static GenericUDAFInfo getGenericUDAFInfo(GenericUDAFEvaluator evaluator,
GenericUDAFEvaluator.Mode emode, ArrayList<ExprNodeDesc> aggParameters)
throws SemanticException {
GenericUDAFInfo r = new GenericUDAFInfo();
// set r.genericUDAFEvaluator
r.genericUDAFEvaluator = evaluator;
// set r.returnType
ObjectInspector returnOI = null;
try {
ArrayList<ObjectInspector> aggOIs = getWritableObjectInspector(aggParameters);
ObjectInspector[] aggOIArray = new ObjectInspector[aggOIs.size()];
for (int ii = 0; ii < aggOIs.size(); ++ii) {
aggOIArray[ii] = aggOIs.get(ii);
}
returnOI = r.genericUDAFEvaluator.init(emode, aggOIArray);
r.returnType = TypeInfoUtils.getTypeInfoFromObjectInspector(returnOI);
} catch (HiveException e) {
throw new SemanticException(e);
}
// set r.convertedParameters
// TODO: type conversion
r.convertedParameters = aggParameters;
return r;
}
private static GenericUDAFEvaluator.Mode groupByDescModeToUDAFMode(
GroupByDesc.Mode mode, boolean isDistinct) {
switch (mode) {
case COMPLETE:
return GenericUDAFEvaluator.Mode.COMPLETE;
case PARTIAL1:
return GenericUDAFEvaluator.Mode.PARTIAL1;
case PARTIAL2:
return GenericUDAFEvaluator.Mode.PARTIAL2;
case PARTIALS:
return isDistinct ? GenericUDAFEvaluator.Mode.PARTIAL1
: GenericUDAFEvaluator.Mode.PARTIAL2;
case FINAL:
return GenericUDAFEvaluator.Mode.FINAL;
case HASH:
return GenericUDAFEvaluator.Mode.PARTIAL1;
case MERGEPARTIAL:
return isDistinct ? GenericUDAFEvaluator.Mode.COMPLETE
: GenericUDAFEvaluator.Mode.FINAL;
default:
throw new RuntimeException("internal error in groupByDescModeToUDAFMode");
}
}
  /**
   * Check if the given internalName represents a constant parameter in aggregation parameters
   * of an aggregation tree.
   * This method is only invoked when map-side aggregation is not involved. In this case,
   * every parameter in every aggregation tree should already have a corresponding ColumnInfo,
   * which is generated when the corresponding ReduceSinkOperator of the GroupByOperator being
   * generating is generated. If we find that this parameter is a constant parameter,
   * we will return the corresponding ExprNodeDesc in reduceValues, and we will not need to
   * use a new ExprNodeColumnDesc, which can not be treated as a constant parameter, for this
   * parameter (since the writableObjectInspector of a ExprNodeColumnDesc will not be
   * a instance of ConstantObjectInspector).
   *
   * @param reduceValues
   *          value columns of the corresponding ReduceSinkOperator
   * @param internalName
   *          the internal name of this parameter
   * @return the ExprNodeDesc of the constant parameter if the given internalName represents
   *         a constant parameter; otherwise, return null
   */
  private ExprNodeDesc isConstantParameterInAggregationParameters(String internalName,
      List<ExprNodeDesc> reduceValues) {
    // only the pattern of "VALUE._col([0-9]+)" should be handled.
    String[] terms = internalName.split("\\.");
    if (terms.length != 2 || reduceValues == null) {
      return null;
    }
    // First component must be the reduce-side VALUE field.
    if (Utilities.ReduceField.VALUE.toString().equals(terms[0])) {
      // Second component encodes the position within the reduce values.
      int pos = getPositionFromInternalName(terms[1]);
      if (pos >= 0 && pos < reduceValues.size()) {
        ExprNodeDesc reduceValue = reduceValues.get(pos);
        if (reduceValue != null) {
          // A ConstantObjectInspector marks a compile-time constant value.
          if (reduceValue.getWritableObjectInspector() instanceof ConstantObjectInspector) {
            // this internalName represents a constant parameter in aggregation parameters
            return reduceValue;
          }
        }
      }
    }
    return null;
  }
  /**
   * Generate the GroupByOperator for the Query Block (parseInfo.getXXX(dest)).
   * The new GroupByOperator will be a child of the reduceSinkOperatorInfo.
   *
   * @param parseInfo
   *          parse info of the query block being compiled
   * @param dest
   *          the destination clause name being processed
   * @param input
   *          the operator (the ReduceSinkOperator's parse-context entry) whose
   *          row resolver supplies the input columns
   * @param rs
   *          the ReduceSinkOperator feeding this group by; its output key and
   *          value columns are consulted for distinct/constant parameters
   * @param mode
   *          The mode of the aggregation (PARTIAL1 or COMPLETE)
   * @param genericUDAFEvaluators
   *          If not null, this function will store the mapping from Aggregation
   *          StringTree to the genericUDAFEvaluator in this parameter, so it
   *          can be used in the next-stage GroupBy aggregations.
   * @return the new GroupByOperator
   */
  @SuppressWarnings("nls")
  private Operator genGroupByPlanGroupByOperator(QBParseInfo parseInfo,
      String dest, Operator input, ReduceSinkOperator rs, GroupByDesc.Mode mode,
      Map<String, GenericUDAFEvaluator> genericUDAFEvaluators)
      throws SemanticException {
    RowResolver groupByInputRowResolver = opParseCtx
        .get(input).getRowResolver();
    RowResolver groupByOutputRowResolver = new RowResolver();
    groupByOutputRowResolver.setIsExprResolver(true);
    ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
    ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
    ArrayList<String> outputColumnNames = new ArrayList<String>();
    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
    // Turn each GROUP BY expression into a group-by key column and register
    // it in the output row resolver under an internal column name.
    List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
    for (int i = 0; i < grpByExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByExprs.get(i);
      ColumnInfo exprInfo = groupByInputRowResolver.getExpression(grpbyExpr);
      if (exprInfo == null) {
        throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
      }
      groupByKeys.add(new ExprNodeColumnDesc(exprInfo.getType(), exprInfo
          .getInternalName(), "", false));
      String field = getColumnInternalName(i);
      outputColumnNames.add(field);
      ColumnInfo oColInfo = new ColumnInfo(field, exprInfo.getType(), null, false);
      groupByOutputRowResolver.putExpression(grpbyExpr,
          oColInfo);
      addAlternateGByKeyMappings(grpbyExpr, oColInfo, input, groupByOutputRowResolver);
      colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
    }
    // For each aggregation
    HashMap<String, ASTNode> aggregationTrees = parseInfo
        .getAggregationExprsForClause(dest);
    assert (aggregationTrees != null);
    // get the last colName for the reduce KEY
    // it represents the column name corresponding to distinct aggr, if any
    String lastKeyColName = null;
    List<String> inputKeyCols = rs.getConf().getOutputKeyColumnNames();
    if (inputKeyCols.size() > 0) {
      lastKeyColName = inputKeyCols.get(inputKeyCols.size() - 1);
    }
    List<ExprNodeDesc> reduceValues = rs.getConf().getValueCols();
    int numDistinctUDFs = 0;
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      ASTNode value = entry.getValue();
      // This is the GenericUDAF name
      String aggName = unescapeIdentifier(value.getChild(0).getText());
      boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
      boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
      // Convert children to aggParameters
      ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
      // 0 is the function name
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode paraExpr = (ASTNode) value.getChild(i);
        ColumnInfo paraExprInfo =
            groupByInputRowResolver.getExpression(paraExpr);
        if (paraExprInfo == null) {
          throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(paraExpr));
        }
        String paraExpression = paraExprInfo.getInternalName();
        assert (paraExpression != null);
        if (isDistinct && lastKeyColName != null) {
          // if aggr is distinct, the parameter is name is constructed as
          // KEY.lastKeyColName:<tag>._colx
          paraExpression = Utilities.ReduceField.KEY.name() + "." +
              lastKeyColName + ":" + numDistinctUDFs + "." +
              getColumnInternalName(i - 1);
        }
        ExprNodeDesc expr = new ExprNodeColumnDesc(paraExprInfo.getType(),
            paraExpression, paraExprInfo.getTabAlias(),
            paraExprInfo.getIsVirtualCol());
        // Prefer the constant expression from the reduce values when this
        // parameter is a compile-time constant (see the helper's javadoc).
        ExprNodeDesc reduceValue = isConstantParameterInAggregationParameters(
            paraExprInfo.getInternalName(), reduceValues);
        if (reduceValue != null) {
          // this parameter is a constant
          expr = reduceValue;
        }
        aggParameters.add(expr);
      }
      if (isDistinct) {
        numDistinctUDFs++;
      }
      Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
      GenericUDAFEvaluator genericUDAFEvaluator = getGenericUDAFEvaluator(
          aggName, aggParameters, value, isDistinct, isAllColumns);
      assert (genericUDAFEvaluator != null);
      GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
          aggParameters);
      aggregations.add(new AggregationDesc(aggName.toLowerCase(),
          udaf.genericUDAFEvaluator, udaf.convertedParameters, isDistinct,
          amode));
      // Aggregation output columns follow the group-by key columns.
      String field = getColumnInternalName(groupByKeys.size()
          + aggregations.size() - 1);
      outputColumnNames.add(field);
      groupByOutputRowResolver.putExpression(value, new ColumnInfo(
          field, udaf.returnType, "", false));
      // Save the evaluator so that it can be used by the next-stage
      // GroupByOperators
      if (genericUDAFEvaluators != null) {
        genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
      }
    }
    float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
    float memoryThreshold = HiveConf
        .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
    Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
        new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
            false, groupByMemoryUsage, memoryThreshold, null, false, 0, numDistinctUDFs > 0),
        new RowSchema(groupByOutputRowResolver.getColumnInfos()),
        input), groupByOutputRowResolver);
    op.setColumnExprMap(colExprMap);
    return op;
  }
// Add the grouping set key to the group by operator.
// This is not the first group by operator, but it is a subsequent group by operator
// which is forwarding the grouping keys introduced by the grouping sets.
// For eg: consider: select key, value, count(1) from T group by key, value with rollup.
// Assuming map-side aggregation and no skew, the plan would look like:
//
// TableScan --> Select --> GroupBy1 --> ReduceSink --> GroupBy2 --> Select --> FileSink
//
// This function is called for GroupBy2 to pass the additional grouping keys introduced by
// GroupBy1 for the grouping set (corresponding to the rollup).
private void addGroupingSetKey(List<ExprNodeDesc> groupByKeys,
RowResolver groupByInputRowResolver,
RowResolver groupByOutputRowResolver,
List<String> outputColumnNames,
Map<String, ExprNodeDesc> colExprMap) throws SemanticException {
// For grouping sets, add a dummy grouping key
String groupingSetColumnName =
groupByInputRowResolver.get(null, VirtualColumn.GROUPINGID.getName()).getInternalName();
ExprNodeDesc inputExpr = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
groupingSetColumnName, null, false);
groupByKeys.add(inputExpr);
String field = getColumnInternalName(groupByKeys.size() - 1);
outputColumnNames.add(field);
groupByOutputRowResolver.put(null, VirtualColumn.GROUPINGID.getName(),
new ColumnInfo(
field,
TypeInfoFactory.stringTypeInfo,
null,
true));
colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
}
// Process grouping set for the reduce sink operator
// For eg: consider: select key, value, count(1) from T group by key, value with rollup.
// Assuming map-side aggregation and no skew, the plan would look like:
//
// TableScan --> Select --> GroupBy1 --> ReduceSink --> GroupBy2 --> Select --> FileSink
//
// This function is called for ReduceSink to add the additional grouping keys introduced by
// GroupBy1 into the reduce keys.
private void processGroupingSetReduceSinkOperator(RowResolver reduceSinkInputRowResolver,
RowResolver reduceSinkOutputRowResolver,
List<ExprNodeDesc> reduceKeys,
List<String> outputKeyColumnNames,
Map<String, ExprNodeDesc> colExprMap) throws SemanticException {
// add a key for reduce sink
String groupingSetColumnName =
reduceSinkInputRowResolver.get(null, VirtualColumn.GROUPINGID.getName()).getInternalName();
ExprNodeDesc inputExpr = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
groupingSetColumnName, null, false);
reduceKeys.add(inputExpr);
outputKeyColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
String field = Utilities.ReduceField.KEY.toString() + "."
+ getColumnInternalName(reduceKeys.size() - 1);
ColumnInfo colInfo = new ColumnInfo(field, reduceKeys.get(
reduceKeys.size() - 1).getTypeInfo(), null, true);
reduceSinkOutputRowResolver.put(null, VirtualColumn.GROUPINGID.getName(), colInfo);
colExprMap.put(colInfo.getInternalName(), inputExpr);
}
  /**
   * Generate the GroupByOperator for the Query Block (parseInfo.getXXX(dest)).
   * The new GroupByOperator will be a child of the reduceSinkOperatorInfo.
   *
   * @param mode
   *          The mode of the aggregation (MERGEPARTIAL, PARTIAL2)
   * @param genericUDAFEvaluators
   *          The mapping from Aggregation StringTree to the
   *          genericUDAFEvaluator.
   * @param distPartAgg
   *          partial aggregation for distincts
   * @param groupingSets
   *          list of grouping sets
   * @param groupingSetsPresent
   *          whether grouping sets are present in this query
   * @param groupingSetsNeedAdditionalMRJob
   *          whether an additional MR job is needed to process the grouping
   *          sets (i.e. they were not consumed by the upstream group by)
   * @return the new GroupByOperator
   */
  @SuppressWarnings("nls")
  private Operator genGroupByPlanGroupByOperator1(QBParseInfo parseInfo,
      String dest, Operator reduceSinkOperatorInfo, GroupByDesc.Mode mode,
      Map<String, GenericUDAFEvaluator> genericUDAFEvaluators,
      boolean distPartAgg,
      List<Integer> groupingSets,
      boolean groupingSetsPresent,
      boolean groupingSetsNeedAdditionalMRJob) throws SemanticException {
    ArrayList<String> outputColumnNames = new ArrayList<String>();
    RowResolver groupByInputRowResolver = opParseCtx
        .get(reduceSinkOperatorInfo).getRowResolver();
    RowResolver groupByOutputRowResolver = new RowResolver();
    groupByOutputRowResolver.setIsExprResolver(true);
    ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
    ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
    List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
    // Turn each GROUP BY expression into a group-by key column and register
    // it in the output row resolver under an internal column name.
    for (int i = 0; i < grpByExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByExprs.get(i);
      ColumnInfo exprInfo = groupByInputRowResolver.getExpression(grpbyExpr);
      if (exprInfo == null) {
        throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
      }
      groupByKeys.add(new ExprNodeColumnDesc(exprInfo.getType(), exprInfo
          .getInternalName(), exprInfo.getTabAlias(), exprInfo
          .getIsVirtualCol()));
      String field = getColumnInternalName(i);
      outputColumnNames.add(field);
      ColumnInfo oColInfo = new ColumnInfo(field, exprInfo.getType(), "", false);
      groupByOutputRowResolver.putExpression(grpbyExpr,
          oColInfo);
      addAlternateGByKeyMappings(grpbyExpr, oColInfo, reduceSinkOperatorInfo, groupByOutputRowResolver);
      colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
    }
    // This is only needed if a new grouping set key is being created
    int groupingSetsPosition = 0;
    // For grouping sets, add a dummy grouping key
    if (groupingSetsPresent) {
      // Consider the query: select a,b, count(1) from T group by a,b with cube;
      // where it is being executed in a single map-reduce job
      // The plan is TableScan -> GroupBy1 -> ReduceSink -> GroupBy2 -> FileSink
      // GroupBy1 already added the grouping id as part of the row
      // This function is called for GroupBy2 to add grouping id as part of the groupby keys
      if (!groupingSetsNeedAdditionalMRJob) {
        addGroupingSetKey(
            groupByKeys,
            groupByInputRowResolver,
            groupByOutputRowResolver,
            outputColumnNames,
            colExprMap);
      }
      else {
        groupingSetsPosition = groupByKeys.size();
        // The grouping set has not yet been processed. Create a new grouping key
        // Consider the query: select a,b, count(1) from T group by a,b with cube;
        // where it is being executed in 2 map-reduce jobs
        // The plan for 1st MR is TableScan -> GroupBy1 -> ReduceSink -> GroupBy2 -> FileSink
        // GroupBy1/ReduceSink worked as if grouping sets were not present
        // This function is called for GroupBy2 to create new rows for grouping sets
        // For each input row (a,b), 4 rows are created for the example above:
        // (a,b), (a,null), (null, b), (null, null)
        createNewGroupingKey(groupByKeys,
            outputColumnNames,
            groupByOutputRowResolver,
            colExprMap);
      }
    }
    HashMap<String, ASTNode> aggregationTrees = parseInfo
        .getAggregationExprsForClause(dest);
    // get the last colName for the reduce KEY
    // it represents the column name corresponding to distinct aggr, if any
    String lastKeyColName = null;
    List<ExprNodeDesc> reduceValues = null;
    if (reduceSinkOperatorInfo.getConf() instanceof ReduceSinkDesc) {
      List<String> inputKeyCols = ((ReduceSinkDesc)
          reduceSinkOperatorInfo.getConf()).getOutputKeyColumnNames();
      if (inputKeyCols.size() > 0) {
        lastKeyColName = inputKeyCols.get(inputKeyCols.size() - 1);
      }
      reduceValues = ((ReduceSinkDesc) reduceSinkOperatorInfo.getConf()).getValueCols();
    }
    int numDistinctUDFs = 0;
    boolean containsDistinctAggr = false;
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      ASTNode value = entry.getValue();
      String aggName = unescapeIdentifier(value.getChild(0).getText());
      ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
      boolean isDistinct = (value.getType() == HiveParser.TOK_FUNCTIONDI);
      containsDistinctAggr = containsDistinctAggr || isDistinct;
      // If the function is distinct, partial aggregation has not been done on
      // the client side.
      // If distPartAgg is set, the client is letting us know that partial
      // aggregation has not been done.
      // For eg: select a, count(b+c), count(distinct d+e) group by a
      // For count(b+c), if partial aggregation has been performed, then we
      // directly look for count(b+c).
      // Otherwise, we look for b+c.
      // For distincts, partial aggregation is never performed on the client
      // side, so always look for the parameters: d+e
      boolean partialAggDone = !(distPartAgg || isDistinct);
      if (!partialAggDone) {
        // 0 is the function name
        for (int i = 1; i < value.getChildCount(); i++) {
          ASTNode paraExpr = (ASTNode) value.getChild(i);
          ColumnInfo paraExprInfo =
              groupByInputRowResolver.getExpression(paraExpr);
          if (paraExprInfo == null) {
            throw new SemanticException(ErrorMsg.INVALID_COLUMN
                .getMsg(paraExpr));
          }
          String paraExpression = paraExprInfo.getInternalName();
          assert (paraExpression != null);
          if (isDistinct && lastKeyColName != null) {
            // if aggr is distinct, the parameter is name is constructed as
            // KEY.lastKeyColName:<tag>._colx
            paraExpression = Utilities.ReduceField.KEY.name() + "." +
                lastKeyColName + ":" + numDistinctUDFs + "."
                + getColumnInternalName(i - 1);
          }
          ExprNodeDesc expr = new ExprNodeColumnDesc(paraExprInfo.getType(),
              paraExpression, paraExprInfo.getTabAlias(),
              paraExprInfo.getIsVirtualCol());
          // Prefer the constant expression from the reduce values when this
          // parameter is a compile-time constant (see the helper's javadoc).
          ExprNodeDesc reduceValue = isConstantParameterInAggregationParameters(
              paraExprInfo.getInternalName(), reduceValues);
          if (reduceValue != null) {
            // this parameter is a constant
            expr = reduceValue;
          }
          aggParameters.add(expr);
        }
      } else {
        // Partial aggregation was done upstream: the single parameter is the
        // partial aggregation result itself.
        ColumnInfo paraExprInfo = groupByInputRowResolver.getExpression(value);
        if (paraExprInfo == null) {
          throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value));
        }
        String paraExpression = paraExprInfo.getInternalName();
        assert (paraExpression != null);
        aggParameters.add(new ExprNodeColumnDesc(paraExprInfo.getType(),
            paraExpression, paraExprInfo.getTabAlias(), paraExprInfo
                .getIsVirtualCol()));
      }
      if (isDistinct) {
        numDistinctUDFs++;
      }
      boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
      Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
      GenericUDAFEvaluator genericUDAFEvaluator = null;
      // For distincts, partial aggregations have not been done
      if (distPartAgg) {
        genericUDAFEvaluator = getGenericUDAFEvaluator(aggName, aggParameters,
            value, isDistinct, isAllColumns);
        assert (genericUDAFEvaluator != null);
        genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
      } else {
        // Reuse the evaluator created by the earlier-stage group by.
        genericUDAFEvaluator = genericUDAFEvaluators.get(entry.getKey());
        assert (genericUDAFEvaluator != null);
      }
      GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
          aggParameters);
      aggregations.add(new AggregationDesc(aggName.toLowerCase(),
          udaf.genericUDAFEvaluator, udaf.convertedParameters,
          (mode != GroupByDesc.Mode.FINAL && isDistinct), amode));
      // Aggregation output columns follow the group-by key columns.
      String field = getColumnInternalName(groupByKeys.size()
          + aggregations.size() - 1);
      outputColumnNames.add(field);
      groupByOutputRowResolver.putExpression(value, new ColumnInfo(
          field, udaf.returnType, "", false));
    }
    float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
    float memoryThreshold = HiveConf
        .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
    // Nothing special needs to be done for grouping sets if
    // this is the final group by operator, and multiple rows corresponding to the
    // grouping sets have been generated upstream.
    // However, if an addition MR job has been created to handle grouping sets,
    // additional rows corresponding to grouping sets need to be created here.
    Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
        new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
            distPartAgg, groupByMemoryUsage, memoryThreshold,
            groupingSets,
            groupingSetsPresent && groupingSetsNeedAdditionalMRJob,
            groupingSetsPosition, containsDistinctAggr),
        new RowSchema(groupByOutputRowResolver.getColumnInfos()), reduceSinkOperatorInfo),
        groupByOutputRowResolver);
    op.setColumnExprMap(colExprMap);
    return op;
  }
/*
* Create a new grouping key for grouping id.
* A dummy grouping id. is added. At runtime, the group by operator
* creates 'n' rows per input row, where 'n' is the number of grouping sets.
*/
private void createNewGroupingKey(List<ExprNodeDesc> groupByKeys,
List<String> outputColumnNames,
RowResolver groupByOutputRowResolver,
Map<String, ExprNodeDesc> colExprMap) {
// The value for the constant does not matter. It is replaced by the grouping set
// value for the actual implementation
ExprNodeConstantDesc constant = new ExprNodeConstantDesc("0");
groupByKeys.add(constant);
String field = getColumnInternalName(groupByKeys.size() - 1);
outputColumnNames.add(field);
groupByOutputRowResolver.put(null, VirtualColumn.GROUPINGID.getName(),
new ColumnInfo(
field,
TypeInfoFactory.stringTypeInfo,
null,
true));
colExprMap.put(field, constant);
}
  /**
   * Generate the map-side GroupByOperator for the Query Block
   * (qb.getParseInfo().getXXX(dest)). The new GroupByOperator will be a child
   * of the inputOperatorInfo.
   *
   * @param mode
   *          The mode of the aggregation (HASH)
   * @param genericUDAFEvaluators
   *          If not null, this function will store the mapping from Aggregation
   *          StringTree to the genericUDAFEvaluator in this parameter, so it
   *          can be used in the next-stage GroupBy aggregations.
   * @param groupingSetKeys
   *          list of grouping set values (null when no grouping sets)
   * @param groupingSetsPresent
   *          whether grouping sets are present in this query
   * @return the new GroupByOperator
   */
  @SuppressWarnings("nls")
  private Operator genGroupByPlanMapGroupByOperator(QB qb,
      String dest,
      List<ASTNode> grpByExprs,
      Operator inputOperatorInfo,
      GroupByDesc.Mode mode,
      Map<String, GenericUDAFEvaluator> genericUDAFEvaluators,
      List<Integer> groupingSetKeys,
      boolean groupingSetsPresent) throws SemanticException {
    RowResolver groupByInputRowResolver = opParseCtx.get(inputOperatorInfo)
        .getRowResolver();
    QBParseInfo parseInfo = qb.getParseInfo();
    RowResolver groupByOutputRowResolver = new RowResolver();
    groupByOutputRowResolver.setIsExprResolver(true);
    ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
    ArrayList<String> outputColumnNames = new ArrayList<String>();
    ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
    Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
    // Turn each GROUP BY expression into a group-by key column and register
    // it in the output row resolver under an internal column name.
    for (int i = 0; i < grpByExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByExprs.get(i);
      ExprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr,
          groupByInputRowResolver);
      groupByKeys.add(grpByExprNode);
      String field = getColumnInternalName(i);
      outputColumnNames.add(field);
      groupByOutputRowResolver.putExpression(grpbyExpr,
          new ColumnInfo(field, grpByExprNode.getTypeInfo(), "", false));
      colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
    }
    // The grouping set key is present after the grouping keys, before the distinct keys
    int groupingSetsPosition = groupByKeys.size();
    // For grouping sets, add a dummy grouping key
    // This dummy key needs to be added as a reduce key
    // For eg: consider: select key, value, count(1) from T group by key, value with rollup.
    // Assuming map-side aggregation and no skew, the plan would look like:
    //
    // TableScan --> Select --> GroupBy1 --> ReduceSink --> GroupBy2 --> Select --> FileSink
    //
    // This function is called for GroupBy1 to create an additional grouping key
    // for the grouping set (corresponding to the rollup).
    if (groupingSetsPresent) {
      createNewGroupingKey(groupByKeys,
          outputColumnNames,
          groupByOutputRowResolver,
          colExprMap);
    }
    // If there is a distinctFuncExp, add all parameters to the reduceKeys.
    if (!parseInfo.getDistinctFuncExprsForClause(dest).isEmpty()) {
      List<ASTNode> list = parseInfo.getDistinctFuncExprsForClause(dest);
      for (ASTNode value : list) {
        // 0 is function name
        for (int i = 1; i < value.getChildCount(); i++) {
          ASTNode parameter = (ASTNode) value.getChild(i);
          // Only add each distinct parameter expression once.
          if (groupByOutputRowResolver.getExpression(parameter) == null) {
            ExprNodeDesc distExprNode = genExprNodeDesc(parameter,
                groupByInputRowResolver);
            groupByKeys.add(distExprNode);
            String field = getColumnInternalName(groupByKeys.size() - 1);
            outputColumnNames.add(field);
            groupByOutputRowResolver.putExpression(parameter, new ColumnInfo(
                field, distExprNode.getTypeInfo(), "", false));
            colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
          }
        }
      }
    }
    // For each aggregation
    HashMap<String, ASTNode> aggregationTrees = parseInfo
        .getAggregationExprsForClause(dest);
    assert (aggregationTrees != null);
    boolean containsDistinctAggr = false;
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      ASTNode value = entry.getValue();
      String aggName = unescapeIdentifier(value.getChild(0).getText());
      ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
      // 0 is the function name
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode paraExpr = (ASTNode) value.getChild(i);
        ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr,
            groupByInputRowResolver);
        aggParameters.add(paraExprNode);
      }
      boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
      containsDistinctAggr = containsDistinctAggr || isDistinct;
      boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
      Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
      GenericUDAFEvaluator genericUDAFEvaluator = getGenericUDAFEvaluator(
          aggName, aggParameters, value, isDistinct, isAllColumns);
      assert (genericUDAFEvaluator != null);
      GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
          aggParameters);
      aggregations.add(new AggregationDesc(aggName.toLowerCase(),
          udaf.genericUDAFEvaluator, udaf.convertedParameters, isDistinct,
          amode));
      // Aggregation output columns follow the group-by (and distinct) keys.
      String field = getColumnInternalName(groupByKeys.size()
          + aggregations.size() - 1);
      outputColumnNames.add(field);
      if (groupByOutputRowResolver.getExpression(value) == null) {
        groupByOutputRowResolver.putExpression(value, new ColumnInfo(
            field, udaf.returnType, "", false));
      }
      // Save the evaluator so that it can be used by the next-stage
      // GroupByOperators
      if (genericUDAFEvaluators != null) {
        genericUDAFEvaluators.put(entry.getKey(), genericUDAFEvaluator);
      }
    }
    float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
    float memoryThreshold = HiveConf
        .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
    Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
        new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
            false, groupByMemoryUsage, memoryThreshold,
            groupingSetKeys, groupingSetsPresent, groupingSetsPosition, containsDistinctAggr),
        new RowSchema(groupByOutputRowResolver.getColumnInfos()),
        inputOperatorInfo), groupByOutputRowResolver);
    op.setColumnExprMap(colExprMap);
    return op;
  }
/**
 * Generate the ReduceSinkOperator for the Group By Query Block
 * (qb.getPartInfo().getXXX(dest)). The new ReduceSinkOperator will be a child
 * of inputOperatorInfo.
 *
 * It will put all Group By keys and the distinct field (if any) in the
 * map-reduce sort key, and all other fields in the map-reduce value.
 *
 * @param numPartitionFields
 *          the number of fields for map-reduce partitioning. This is usually
 *          the number of fields in the Group By keys.
 * @param changeNumPartitionFields
 *          when true and grouping sets are present, the grouping-set key is
 *          counted as one extra partition field.
 * @param mapAggrDone
 *          whether map-side partial aggregation has already run; if so the
 *          reduce values carry the partial aggregation result columns rather
 *          than the raw aggregation parameters.
 * @return the new ReduceSinkOperator.
 * @throws SemanticException
 */
@SuppressWarnings("nls")
private ReduceSinkOperator genGroupByPlanReduceSinkOperator(QB qb,
    String dest,
    Operator inputOperatorInfo,
    List<ASTNode> grpByExprs,
    int numPartitionFields,
    boolean changeNumPartitionFields,
    int numReducers,
    boolean mapAggrDone,
    boolean groupingSetsPresent) throws SemanticException {
  RowResolver reduceSinkInputRowResolver = opParseCtx.get(inputOperatorInfo)
      .getRowResolver();
  QBParseInfo parseInfo = qb.getParseInfo();
  // Output resolver maps parse-tree expressions (not table columns) to the
  // reduce-sink output columns, hence the expression-resolver flag.
  RowResolver reduceSinkOutputRowResolver = new RowResolver();
  reduceSinkOutputRowResolver.setIsExprResolver(true);
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // Pre-compute group-by keys and store in reduceKeys
  List<String> outputKeyColumnNames = new ArrayList<String>();
  List<String> outputValueColumnNames = new ArrayList<String>();
  ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs, dest,
      reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
      colExprMap);
  // add a key for reduce sink
  if (groupingSetsPresent) {
    // Process grouping set for the reduce sink operator
    processGroupingSetReduceSinkOperator(
        reduceSinkInputRowResolver,
        reduceSinkOutputRowResolver,
        reduceKeys,
        outputKeyColumnNames,
        colExprMap);
    if (changeNumPartitionFields) {
      numPartitionFields++;
    }
  }
  // Parameters of DISTINCT aggregates are appended to the sort key; the
  // returned indices record where each aggregate's parameters ended up.
  List<List<Integer>> distinctColIndices = getDistinctColIndicesForReduceSink(parseInfo, dest,
      reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
      colExprMap);
  ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
  HashMap<String, ASTNode> aggregationTrees = parseInfo
      .getAggregationExprsForClause(dest);
  if (!mapAggrDone) {
    // No map-side aggregation yet: ship the raw aggregation parameters.
    getReduceValuesForReduceSinkNoMapAgg(parseInfo, dest, reduceSinkInputRowResolver,
        reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues, colExprMap);
  } else {
    // Put partial aggregation results in reduceValues
    int inputField = reduceKeys.size();
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      // Partial aggregates follow the keys in the input row schema, one
      // column per aggregation, starting at index reduceKeys.size().
      TypeInfo type = reduceSinkInputRowResolver.getColumnInfos().get(
          inputField).getType();
      ExprNodeColumnDesc exprDesc = new ExprNodeColumnDesc(type,
          getColumnInternalName(inputField), "", false);
      reduceValues.add(exprDesc);
      inputField++;
      String outputColName = getColumnInternalName(reduceValues.size() - 1);
      outputValueColumnNames.add(outputColName);
      String internalName = Utilities.ReduceField.VALUE.toString() + "."
          + outputColName;
      reduceSinkOutputRowResolver.putExpression(entry.getValue(),
          new ColumnInfo(internalName, type, null, false));
      colExprMap.put(internalName, exprDesc);
    }
  }
  ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
      OperatorFactory.getAndMakeChild(
          PlanUtils.getReduceSinkDesc(reduceKeys,
              // one extra key column when the grouping-set id is in the key
              groupingSetsPresent ? grpByExprs.size() + 1 : grpByExprs.size(),
              reduceValues, distinctColIndices,
              outputKeyColumnNames, outputValueColumnNames, true, -1, numPartitionFields,
              numReducers),
          new RowSchema(reduceSinkOutputRowResolver.getColumnInfos()), inputOperatorInfo),
      reduceSinkOutputRowResolver);
  rsOp.setColumnExprMap(colExprMap);
  return rsOp;
}
/**
 * Translates each group-by expression into an ExprNodeDesc reduce key,
 * registering every key in the output row resolver under its internal
 * KEY.<colname> alias and in {@code colExprMap}.
 *
 * @return the list of reduce-sink key expressions, one per group-by expr
 * @throws SemanticException if the same group-by expression appears twice
 */
private ArrayList<ExprNodeDesc> getReduceKeysForReduceSink(List<ASTNode> grpByExprs, String dest,
    RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver,
    List<String> outputKeyColumnNames, Map<String, ExprNodeDesc> colExprMap)
    throws SemanticException {
  ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
  for (ASTNode grpbyExpr : grpByExprs) {
    ExprNodeDesc keyExpr = genExprNodeDesc(grpbyExpr, reduceSinkInputRowResolver);
    reduceKeys.add(keyExpr);
    // A second occurrence of the same expression in the GROUP BY list is
    // rejected outright.
    if (reduceSinkOutputRowResolver.getExpression(grpbyExpr) != null) {
      throw new SemanticException(ErrorMsg.DUPLICATE_GROUPBY_KEY
          .getMsg(grpbyExpr));
    }
    String internalName = getColumnInternalName(reduceKeys.size() - 1);
    outputKeyColumnNames.add(internalName);
    String field = Utilities.ReduceField.KEY.toString() + "." + internalName;
    ColumnInfo colInfo = new ColumnInfo(field, keyExpr.getTypeInfo(), null, false);
    reduceSinkOutputRowResolver.putExpression(grpbyExpr, colInfo);
    colExprMap.put(colInfo.getInternalName(), keyExpr);
  }
  return reduceKeys;
}
/**
 * Appends the parameters of every DISTINCT aggregate for {@code dest} to the
 * reduce-sink key (de-duplicated against keys already present) and returns,
 * for each distinct aggregate, the key positions of its parameters.
 *
 * @param reduceKeys
 *          the reduce-sink keys built so far; extended in place with any
 *          distinct parameter not already present
 * @return one list of key indices per distinct aggregate (empty when the
 *         clause has no distinct aggregates)
 * @throws SemanticException
 */
private List<List<Integer>> getDistinctColIndicesForReduceSink(QBParseInfo parseInfo,
    String dest,
    List<ExprNodeDesc> reduceKeys, RowResolver reduceSinkInputRowResolver,
    RowResolver reduceSinkOutputRowResolver, List<String> outputKeyColumnNames,
    Map<String, ExprNodeDesc> colExprMap)
    throws SemanticException {
  List<List<Integer>> distinctColIndices = new ArrayList<List<Integer>>();
  // If there is a distinctFuncExp, add all parameters to the reduceKeys.
  if (!parseInfo.getDistinctFuncExprsForClause(dest).isEmpty()) {
    List<ASTNode> distFuncs = parseInfo.getDistinctFuncExprsForClause(dest);
    // A single union-style output column name covers all distinct params.
    String colName = getColumnInternalName(reduceKeys.size());
    outputKeyColumnNames.add(colName);
    for (int i = 0; i < distFuncs.size(); i++) {
      ASTNode value = distFuncs.get(i);
      int numExprs = 0;
      List<Integer> distinctIndices = new ArrayList<Integer>();
      // 0 is function name
      for (int j = 1; j < value.getChildCount(); j++) {
        ASTNode parameter = (ASTNode) value.getChild(j);
        ExprNodeDesc expr = genExprNodeDesc(parameter, reduceSinkInputRowResolver);
        // see if expr is already present in reduceKeys.
        // get index of expr in reduceKeys (linear scan on the expr string;
        // ri == reduceKeys.size() means "not found")
        int ri;
        for (ri = 0; ri < reduceKeys.size(); ri++) {
          if (reduceKeys.get(ri).getExprString().equals(expr.getExprString())) {
            break;
          }
        }
        // add the expr to reduceKeys if it is not present
        if (ri == reduceKeys.size()) {
          // Field name encodes distinct-aggregate index i and parameter
          // position, e.g. KEY.<colName>:<i>.<name>.
          String name = getColumnInternalName(numExprs);
          String field = Utilities.ReduceField.KEY.toString() + "." + colName
              + ":" + i
              + "." + name;
          ColumnInfo colInfo = new ColumnInfo(field, expr.getTypeInfo(), null, false);
          reduceSinkOutputRowResolver.putExpression(parameter, colInfo);
          colExprMap.put(field, expr);
          reduceKeys.add(expr);
        }
        // add the index of expr in reduceKeys to distinctIndices
        distinctIndices.add(ri);
        numExprs++;
      }
      distinctColIndices.add(distinctIndices);
    }
  }
  return distinctColIndices;
}
/**
 * Collects the parameters of all (non-map-aggregated) aggregations for
 * {@code dest} into the reduce-sink values, registering each under its
 * VALUE.<colname> alias. Parameters already registered (shared between
 * aggregations) are not added twice.
 *
 * @throws SemanticException
 */
private void getReduceValuesForReduceSinkNoMapAgg(QBParseInfo parseInfo, String dest,
    RowResolver reduceSinkInputRowResolver, RowResolver reduceSinkOutputRowResolver,
    List<String> outputValueColumnNames, ArrayList<ExprNodeDesc> reduceValues,
    Map<String, ExprNodeDesc> colExprMap) throws SemanticException {
  HashMap<String, ASTNode> aggregationTrees = parseInfo
      .getAggregationExprsForClause(dest);
  for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
    ASTNode aggTree = entry.getValue();
    // Child 0 is the function name, so parameters start at child 1.
    for (int i = 1; i < aggTree.getChildCount(); i++) {
      ASTNode parameter = (ASTNode) aggTree.getChild(i);
      if (reduceSinkOutputRowResolver.getExpression(parameter) != null) {
        // Already shipped for a previous aggregation.
        continue;
      }
      ExprNodeDesc exprDesc = genExprNodeDesc(parameter, reduceSinkInputRowResolver);
      reduceValues.add(exprDesc);
      String internalName = getColumnInternalName(reduceValues.size() - 1);
      outputValueColumnNames.add(internalName);
      String field = Utilities.ReduceField.VALUE.toString() + "." + internalName;
      reduceSinkOutputRowResolver.putExpression(parameter,
          new ColumnInfo(field, exprDesc.getTypeInfo(), null, false));
      colExprMap.put(field, exprDesc);
    }
  }
}
/**
 * Generate a single ReduceSinkOperator shared by several GROUP BY
 * destinations ({@code dests}). Keys and distinct indices are computed from
 * the first destination (they must agree across dests — see comment below);
 * reduce values are the union of all destinations' aggregation parameters
 * plus any columns their WHERE clauses need that are not already shipped.
 *
 * @return the shared ReduceSinkOperator
 * @throws SemanticException
 */
@SuppressWarnings("nls")
private ReduceSinkOperator genCommonGroupByPlanReduceSinkOperator(QB qb, List<String> dests,
    Operator inputOperatorInfo) throws SemanticException {
  RowResolver reduceSinkInputRowResolver = opParseCtx.get(inputOperatorInfo)
      .getRowResolver();
  QBParseInfo parseInfo = qb.getParseInfo();
  RowResolver reduceSinkOutputRowResolver = new RowResolver();
  reduceSinkOutputRowResolver.setIsExprResolver(true);
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // The group by keys and distinct keys should be the same for all dests, so using the first
  // one to produce these will be the same as using any other.
  String dest = dests.get(0);
  // Pre-compute group-by keys and store in reduceKeys
  List<String> outputKeyColumnNames = new ArrayList<String>();
  List<String> outputValueColumnNames = new ArrayList<String>();
  List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
  ArrayList<ExprNodeDesc> reduceKeys = getReduceKeysForReduceSink(grpByExprs, dest,
      reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
      colExprMap);
  List<List<Integer>> distinctColIndices = getDistinctColIndicesForReduceSink(parseInfo, dest,
      reduceKeys, reduceSinkInputRowResolver, reduceSinkOutputRowResolver, outputKeyColumnNames,
      colExprMap);
  ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
  // The dests can have different non-distinct aggregations, so we have to iterate over all of
  // them
  for (String destination : dests) {
    getReduceValuesForReduceSinkNoMapAgg(parseInfo, destination, reduceSinkInputRowResolver,
        reduceSinkOutputRowResolver, outputValueColumnNames, reduceValues, colExprMap);
    // Need to pass all of the columns used in the where clauses as reduce values
    ASTNode whereClause = parseInfo.getWhrForClause(destination);
    if (whereClause != null) {
      assert whereClause.getChildCount() == 1;
      ASTNode predicates = (ASTNode) whereClause.getChild(0);
      Map<ASTNode, ExprNodeDesc> nodeOutputs =
          genAllExprNodeDesc(predicates, reduceSinkInputRowResolver);
      // Predicate subtrees already reachable through the reduce keys need
      // not be shipped again as values.
      removeMappingForKeys(predicates, nodeOutputs, reduceKeys);
      // extract columns missing in current RS key/value
      for (Map.Entry<ASTNode, ExprNodeDesc> entry : nodeOutputs.entrySet()) {
        ASTNode parameter = entry.getKey();
        ExprNodeDesc expression = entry.getValue();
        // Only plain column references are forwarded; composite expressions
        // are reconstructed from their columns on the reduce side.
        if (!(expression instanceof ExprNodeColumnDesc)) {
          continue;
        }
        if (ExprNodeDescUtils.indexOf(expression, reduceValues) >= 0) {
          // already among the reduce values
          continue;
        }
        String internalName = getColumnInternalName(reduceValues.size());
        String field = Utilities.ReduceField.VALUE.toString() + "." + internalName;
        reduceValues.add(expression);
        outputValueColumnNames.add(internalName);
        reduceSinkOutputRowResolver.putExpression(parameter,
            new ColumnInfo(field, expression.getTypeInfo(), null, false));
        colExprMap.put(field, expression);
      }
    }
  }
  ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
      OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys,
          grpByExprs.size(), reduceValues, distinctColIndices,
          outputKeyColumnNames, outputValueColumnNames, true, -1, grpByExprs.size(),
          -1), new RowSchema(reduceSinkOutputRowResolver
          .getColumnInfos()), inputOperatorInfo), reduceSinkOutputRowResolver);
  rsOp.setColumnExprMap(colExprMap);
  return rsOp;
}
// Remove expression node descriptor and children of it for a given predicate
// from mapping if it's already on RS keys.
// Remaining column expressions would be a candidate for an RS value
private void removeMappingForKeys(ASTNode predicate, Map<ASTNode, ExprNodeDesc> mapping,
    List<ExprNodeDesc> keys) {
  ExprNodeDesc translated = mapping.get(predicate);
  if (translated != null && ExprNodeDescUtils.indexOf(translated, keys) >= 0) {
    // Whole subtree is reachable via a reduce-sink key; drop it entirely.
    removeRecursively(predicate, mapping);
    return;
  }
  // Otherwise test each child subtree independently.
  for (int i = 0; i < predicate.getChildCount(); i++) {
    removeMappingForKeys((ASTNode) predicate.getChild(i), mapping, keys);
  }
}
// Remove expression node desc and all children of it from mapping
private void removeRecursively(ASTNode current, Map<ASTNode, ExprNodeDesc> mapping) {
  mapping.remove(current);
  int childCount = current.getChildCount();
  for (int idx = 0; idx < childCount; idx++) {
    removeRecursively((ASTNode) current.getChild(idx), mapping);
  }
}
/**
 * Generate the second ReduceSinkOperator for the Group By Plan
 * (parseInfo.getXXX(dest)). The new ReduceSinkOperator will be a child of
 * groupByOperatorInfo.
 *
 * The second ReduceSinkOperator will put the group by keys in the map-reduce
 * sort key, and put the partial aggregation results in the map-reduce value.
 *
 * @param numPartitionFields
 *          the number of fields in the map-reduce partition key. This should
 *          always be the same as the number of Group By keys. We should be
 *          able to remove this parameter since in this phase there is no
 *          distinct any more.
 * @return the new ReduceSinkOperator.
 * @throws SemanticException
 */
@SuppressWarnings("nls")
private Operator genGroupByPlanReduceSinkOperator2MR(QBParseInfo parseInfo,
    String dest,
    Operator groupByOperatorInfo,
    int numPartitionFields,
    int numReducers,
    boolean groupingSetsPresent) throws SemanticException {
  RowResolver reduceSinkInputRowResolver2 = opParseCtx.get(
      groupByOperatorInfo).getRowResolver();
  RowResolver reduceSinkOutputRowResolver2 = new RowResolver();
  reduceSinkOutputRowResolver2.setIsExprResolver(true);
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  // Get group-by keys and store in reduceKeys
  List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
  for (int i = 0; i < grpByExprs.size(); ++i) {
    ASTNode grpbyExpr = grpByExprs.get(i);
    // The i-th key of the partial-aggregation output is addressed by its
    // internal column name (position i in the input schema).
    String field = getColumnInternalName(i);
    outputColumnNames.add(field);
    TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(
        grpbyExpr).getType();
    ExprNodeColumnDesc inputExpr = new ExprNodeColumnDesc(typeInfo, field,
        "", false);
    reduceKeys.add(inputExpr);
    ColumnInfo colInfo = new ColumnInfo(Utilities.ReduceField.KEY.toString()
        + "." + field, typeInfo, "", false);
    reduceSinkOutputRowResolver2.putExpression(grpbyExpr, colInfo);
    colExprMap.put(colInfo.getInternalName(), inputExpr);
  }
  // add a key for reduce sink
  if (groupingSetsPresent) {
    // Note that partitioning fields dont need to change, since it is either
    // partitioned randomly, or by all grouping keys + distinct keys
    processGroupingSetReduceSinkOperator(
        reduceSinkInputRowResolver2,
        reduceSinkOutputRowResolver2,
        reduceKeys,
        outputColumnNames,
        colExprMap);
  }
  // Get partial aggregation results and store in reduceValues
  ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
  int inputField = reduceKeys.size();
  HashMap<String, ASTNode> aggregationTrees = parseInfo
      .getAggregationExprsForClause(dest);
  for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
    // Partial aggregates sit after the keys in the input schema, one column
    // per aggregation.
    String field = getColumnInternalName(inputField);
    ASTNode t = entry.getValue();
    TypeInfo typeInfo = reduceSinkInputRowResolver2.getExpression(t)
        .getType();
    ExprNodeColumnDesc exprDesc = new ExprNodeColumnDesc(typeInfo, field, "", false);
    reduceValues.add(exprDesc);
    inputField++;
    String col = getColumnInternalName(reduceValues.size() - 1);
    outputColumnNames.add(col);
    reduceSinkOutputRowResolver2.putExpression(t, new ColumnInfo(
        Utilities.ReduceField.VALUE.toString() + "." + col, typeInfo, "",
        false));
    colExprMap.put(col, exprDesc);
  }
  ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
      OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys,
          reduceValues, outputColumnNames, true, -1, numPartitionFields,
          numReducers), new RowSchema(reduceSinkOutputRowResolver2
          .getColumnInfos()), groupByOperatorInfo),
      reduceSinkOutputRowResolver2);
  rsOp.setColumnExprMap(colExprMap);
  return rsOp;
}
/**
 * Generate the second GroupByOperator for the Group By Plan
 * (parseInfo.getXXX(dest)). The new GroupByOperator will do the second
 * aggregation based on the partial aggregation results.
 *
 * @param mode
 *          the mode of aggregation (FINAL)
 * @param genericUDAFEvaluators
 *          The mapping from Aggregation StringTree to the
 *          genericUDAFEvaluator.
 * @param groupingSetsPresent
 *          whether a grouping-set key must be appended to the group-by keys
 * @return the new GroupByOperator
 * @throws SemanticException
 */
@SuppressWarnings("nls")
private Operator genGroupByPlanGroupByOperator2MR(QBParseInfo parseInfo,
    String dest,
    Operator reduceSinkOperatorInfo2,
    GroupByDesc.Mode mode,
    Map<String, GenericUDAFEvaluator> genericUDAFEvaluators,
    boolean groupingSetsPresent) throws SemanticException {
  RowResolver groupByInputRowResolver2 = opParseCtx.get(
      reduceSinkOperatorInfo2).getRowResolver();
  RowResolver groupByOutputRowResolver2 = new RowResolver();
  groupByOutputRowResolver2.setIsExprResolver(true);
  ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  // Re-expose each group-by key from the reduce-sink output as a column
  // reference keyed by its internal name.
  for (int i = 0; i < grpByExprs.size(); ++i) {
    ASTNode grpbyExpr = grpByExprs.get(i);
    ColumnInfo exprInfo = groupByInputRowResolver2.getExpression(grpbyExpr);
    if (exprInfo == null) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
    }
    String expression = exprInfo.getInternalName();
    groupByKeys.add(new ExprNodeColumnDesc(exprInfo.getType(), expression,
        exprInfo.getTabAlias(), exprInfo.getIsVirtualCol()));
    String field = getColumnInternalName(i);
    outputColumnNames.add(field);
    ColumnInfo oColInfo = new ColumnInfo(field, exprInfo.getType(), "", false);
    groupByOutputRowResolver2.putExpression(grpbyExpr,
        oColInfo);
    addAlternateGByKeyMappings(grpbyExpr, oColInfo, reduceSinkOperatorInfo2, groupByOutputRowResolver2);
    colExprMap.put(field, groupByKeys.get(groupByKeys.size() - 1));
  }
  // For grouping sets, add a dummy grouping key
  if (groupingSetsPresent) {
    addGroupingSetKey(
        groupByKeys,
        groupByInputRowResolver2,
        groupByOutputRowResolver2,
        outputColumnNames,
        colExprMap);
  }
  HashMap<String, ASTNode> aggregationTrees = parseInfo
      .getAggregationExprsForClause(dest);
  boolean containsDistinctAggr = false;
  for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
    ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
    ASTNode value = entry.getValue();
    // The single parameter of each final-stage aggregation is the partial
    // aggregate produced by the previous stage.
    ColumnInfo paraExprInfo = groupByInputRowResolver2.getExpression(value);
    if (paraExprInfo == null) {
      throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(value));
    }
    String paraExpression = paraExprInfo.getInternalName();
    assert (paraExpression != null);
    aggParameters.add(new ExprNodeColumnDesc(paraExprInfo.getType(),
        paraExpression, paraExprInfo.getTabAlias(), paraExprInfo
        .getIsVirtualCol()));
    String aggName = unescapeIdentifier(value.getChild(0).getText());
    boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
    containsDistinctAggr = containsDistinctAggr || isDistinct;
    boolean isStar = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
    Mode amode = groupByDescModeToUDAFMode(mode, isDistinct);
    // Reuse the evaluator created by the earlier stage so both stages agree
    // on the UDAF instance.
    GenericUDAFEvaluator genericUDAFEvaluator = genericUDAFEvaluators
        .get(entry.getKey());
    assert (genericUDAFEvaluator != null);
    GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator, amode,
        aggParameters);
    aggregations
        .add(new AggregationDesc(
            aggName.toLowerCase(),
            udaf.genericUDAFEvaluator,
            udaf.convertedParameters,
            // NOTE(review): the distinct flag is deliberately cleared in
            // FINAL mode (distinct handling happened in an earlier stage);
            // this differs from the local isDistinct above — confirm before
            // changing.
            (mode != GroupByDesc.Mode.FINAL && value.getToken().getType() ==
            HiveParser.TOK_FUNCTIONDI),
            amode));
    String field = getColumnInternalName(groupByKeys.size()
        + aggregations.size() - 1);
    outputColumnNames.add(field);
    groupByOutputRowResolver2.putExpression(value, new ColumnInfo(
        field, udaf.returnType, "", false));
  }
  float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
  float memoryThreshold = HiveConf
      .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
  Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
          false, groupByMemoryUsage, memoryThreshold, null, false, 0, containsDistinctAggr),
      new RowSchema(groupByOutputRowResolver2.getColumnInfos()),
      reduceSinkOperatorInfo2), groupByOutputRowResolver2);
  op.setColumnExprMap(colExprMap);
  return op;
}
/**
 * Generate a Group-By plan using a single map-reduce job (3 operators will be
 * inserted):
 *
 * ReduceSink ( keys = (K1_EXP, K2_EXP, DISTINCT_EXP), values = (A1_EXP,
 * A2_EXP) ) SortGroupBy (keys = (KEY.0,KEY.1), aggregations =
 * (count_distinct(KEY.2), sum(VALUE.0), count(VALUE.1))) Select (final
 * selects).
 *
 * Spray by the group by key, sort by the distinct key (if any), and compute
 * aggregates in a single reducer pass (mode = COMPLETE).
 *
 * @param dest
 * @param qb
 * @param input
 * @return the reducer-side GroupByOperator
 * @throws SemanticException if grouping sets are requested (unsupported here)
 */
@SuppressWarnings({"nls"})
private Operator genGroupByPlan1MR(String dest, QB qb, Operator input)
    throws SemanticException {
  QBParseInfo parseInfo = qb.getParseInfo();
  ObjectPair<List<ASTNode>, List<Integer>> grpByExprsGroupingSets =
      getGroupByGroupingSetsForClause(parseInfo, dest);
  List<ASTNode> grpByExprs = grpByExprsGroupingSets.getFirst();
  List<Integer> groupingSets = grpByExprsGroupingSets.getSecond();
  // Grouping sets are not allowed in this plan shape.
  if (!groupingSets.isEmpty()) {
    throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOMAPAGGR.getMsg());
  }
  // A global aggregation (no group-by keys) must run on a single reducer.
  int numReducers = grpByExprs.isEmpty() ? 1 : -1;
  // ////// 1. Generate ReduceSinkOperator
  ReduceSinkOperator reduceSinkOperatorInfo =
      genGroupByPlanReduceSinkOperator(qb,
          dest,
          input,
          grpByExprs,
          grpByExprs.size(),
          false,
          numReducers,
          false,
          false);
  // ////// 2. Generate GroupbyOperator (COMPLETE: iterate/merge in one pass)
  return genGroupByPlanGroupByOperator(parseInfo,
      dest, reduceSinkOperatorInfo, reduceSinkOperatorInfo, GroupByDesc.Mode.COMPLETE, null);
}
/**
 * Generate a single-reduce plan serving multiple GROUP BY destinations: one
 * shared ReduceSinkOperator feeds a ForwardOperator, under which each
 * destination gets its own (optional) filter and GroupByOperator chain.
 *
 * @return the operator produced for the last destination processed
 * @throws SemanticException
 */
@SuppressWarnings({"nls"})
private Operator genGroupByPlan1ReduceMultiGBY(List<String> dests, QB qb, Operator input,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBParseInfo parseInfo = qb.getParseInfo();
  ExprNodeDesc previous = null;
  Operator selectInput = input;
  // In order to facilitate partition pruning, OR the where clauses together and put them at the
  // top of the operator tree, this could also reduce the amount of data going to the reducer
  List<ExprNodeDesc.ExprNodeDescEqualityWrapper> whereExpressions =
      new ArrayList<ExprNodeDesc.ExprNodeDescEqualityWrapper>();
  for (String dest : dests) {
    ASTNode whereExpr = parseInfo.getWhrForClause(dest);
    if (whereExpr != null) {
      OpParseContext inputCtx = opParseCtx.get(input);
      RowResolver inputRR = inputCtx.getRowResolver();
      ExprNodeDesc current = genExprNodeDesc((ASTNode) whereExpr.getChild(0), inputRR);
      // Check the list of where expressions already added so they aren't duplicated
      ExprNodeDesc.ExprNodeDescEqualityWrapper currentWrapped =
          new ExprNodeDesc.ExprNodeDescEqualityWrapper(current);
      if (!whereExpressions.contains(currentWrapped)) {
        whereExpressions.add(currentWrapped);
      } else {
        continue;
      }
      if (previous == null) {
        // If this is the first expression
        previous = current;
        continue;
      }
      // Fold this predicate into the running disjunction: previous OR current.
      GenericUDFOPOr or = new GenericUDFOPOr();
      List<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(2);
      expressions.add(previous);
      expressions.add(current);
      ExprNodeDesc orExpr =
          new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, or, expressions);
      previous = orExpr;
    } else {
      // If an expression does not have a where clause, there can be no common filter
      previous = null;
      break;
    }
  }
  if (previous != null) {
    // Every destination has a WHERE clause: push the common OR-filter below
    // the select so it prunes rows before the shuffle.
    OpParseContext inputCtx = opParseCtx.get(input);
    RowResolver inputRR = inputCtx.getRowResolver();
    FilterDesc orFilterDesc = new FilterDesc(previous, false);
    selectInput = putOpInsertMap(OperatorFactory.getAndMakeChild(
        orFilterDesc, new RowSchema(
        inputRR.getColumnInfos()), input), inputRR);
  }
  // insert a select operator here used by the ColumnPruner to reduce
  // the data to shuffle
  Operator select = insertSelectAllPlanForGroupBy(selectInput);
  // Generate ReduceSinkOperator
  ReduceSinkOperator reduceSinkOperatorInfo =
      genCommonGroupByPlanReduceSinkOperator(qb, dests, select);
  // It is assumed throughout the code that a reducer has a single child, add a
  // ForwardOperator so that we can add multiple filter/group by operators as children
  RowResolver reduceSinkOperatorInfoRR = opParseCtx.get(reduceSinkOperatorInfo).getRowResolver();
  Operator forwardOp = putOpInsertMap(OperatorFactory.getAndMakeChild(new ForwardDesc(),
      new RowSchema(reduceSinkOperatorInfoRR.getColumnInfos()), reduceSinkOperatorInfo),
      reduceSinkOperatorInfoRR);
  Operator curr = forwardOp;
  for (String dest : dests) {
    curr = forwardOp;
    // Each destination re-applies its own full WHERE clause on the reduce
    // side (the map-side OR-filter was only a coarse pre-filter).
    if (parseInfo.getWhrForClause(dest) != null) {
      ASTNode whereExpr = qb.getParseInfo().getWhrForClause(dest);
      curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, forwardOp, aliasToOpInfo, false);
    }
    // Generate GroupbyOperator
    Operator groupByOperatorInfo = genGroupByPlanGroupByOperator(parseInfo,
        dest, curr, reduceSinkOperatorInfo, GroupByDesc.Mode.COMPLETE, null);
    curr = genPostGroupByBodyPlan(groupByOperatorInfo, dest, qb, aliasToOpInfo);
  }
  return curr;
}
/**
 * Extracts the GenericUDAFEvaluator of each AggregationDesc, preserving
 * order.
 */
static ArrayList<GenericUDAFEvaluator> getUDAFEvaluators(
    ArrayList<AggregationDesc> aggs) {
  ArrayList<GenericUDAFEvaluator> evaluators = new ArrayList<GenericUDAFEvaluator>();
  for (AggregationDesc agg : aggs) {
    evaluators.add(agg.getGenericUDAFEvaluator());
  }
  return evaluators;
}
/**
 * Generate a Multi Group-By plan using 2 map-reduce jobs.
 *
 * Stage 1 computes hash-based partial aggregates grouped by the distinct key
 * (Reducer: iterate/terminatePartial, mode = PARTIAL1); stage 2 sprays the
 * partial results by the grouping key and merges them (Reducer:
 * merge/terminate, mode = FINAL).
 *
 * @param dest
 * @param qb
 * @param input
 * @return the final-stage GroupByOperator
 * @throws SemanticException
 */
@SuppressWarnings("nls")
private Operator genGroupByPlan2MRMultiGroupBy(String dest, QB qb,
    Operator input) throws SemanticException {
  // Evaluators created here are handed to the final stage so both stages
  // operate on the same UDAF instances.
  Map<String, GenericUDAFEvaluator> genericUDAFEvaluators =
      new LinkedHashMap<String, GenericUDAFEvaluator>();
  QBParseInfo parseInfo = qb.getParseInfo();
  // ////// Stage 1: map-side hash partial aggregation
  Operator partialAggr = genGroupByPlanGroupByOperator1(parseInfo,
      dest, input, GroupByDesc.Mode.HASH, genericUDAFEvaluators, true,
      null, false, false);
  List<ASTNode> grpByExprs = getGroupByForClause(parseInfo, dest);
  // ////// Stage 2: shuffle partial results on the grouping key
  Operator shuffle = genGroupByPlanReduceSinkOperator2MR(
      parseInfo, dest, partialAggr, grpByExprs.size(), -1, false);
  // ////// Stage 2 reducer: merge/terminate
  return genGroupByPlanGroupByOperator2MR(parseInfo,
      dest, shuffle, GroupByDesc.Mode.FINAL,
      genericUDAFEvaluators, false);
}
/**
 * Generate a Group-By plan using a 2 map-reduce jobs (5 operators will be
 * inserted):
 *
 * ReduceSink ( keys = (K1_EXP, K2_EXP, DISTINCT_EXP), values = (A1_EXP,
 * A2_EXP) ) NOTE: If DISTINCT_EXP is null, partition by rand() SortGroupBy
 * (keys = (KEY.0,KEY.1), aggregations = (count_distinct(KEY.2), sum(VALUE.0),
 * count(VALUE.1))) ReduceSink ( keys = (0,1), values=(2,3,4)) SortGroupBy
 * (keys = (KEY.0,KEY.1), aggregations = (sum(VALUE.0), sum(VALUE.1),
 * sum(VALUE.2))) Select (final selects).
 *
 * @param dest
 * @param qb
 * @param input
 * @return
 * @throws SemanticException
 *
 *           Generate a Group-By plan using a 2 map-reduce jobs. Spray by the
 *           grouping key and distinct key (or a random number, if no distinct
 *           is present) in hope of getting a uniform distribution, and
 *           compute partial aggregates grouped by the reduction key (grouping
 *           key + distinct key). Evaluate partial aggregates first, and spray
 *           by the grouping key to compute actual aggregates in the second
 *           phase. The aggregation evaluation functions are as follows:
 *           Partitioning Key: random() if no DISTINCT grouping + distinct key
 *           if DISTINCT
 *
 *           Sorting Key: grouping key if no DISTINCT grouping + distinct key
 *           if DISTINCT
 *
 *           Reducer: iterate/terminatePartial (mode = PARTIAL1)
 *
 *           STAGE 2
 *
 *           Partitioning Key: grouping key
 *
 *           Sorting Key: grouping key if no DISTINCT grouping + distinct key
 *           if DISTINCT
 *
 *           Reducer: merge/terminate (mode = FINAL)
 */
@SuppressWarnings("nls")
private Operator genGroupByPlan2MR(String dest, QB qb, Operator input)
    throws SemanticException {
  QBParseInfo parseInfo = qb.getParseInfo();
  ObjectPair<List<ASTNode>, List<Integer>> grpByExprsGroupingSets =
      getGroupByGroupingSetsForClause(parseInfo, dest);
  List<ASTNode> grpByExprs = grpByExprsGroupingSets.getFirst();
  List<Integer> groupingSets = grpByExprsGroupingSets.getSecond();
  // Grouping sets are not allowed
  // This restriction can be lifted in future.
  // HIVE-3508 has been filed for this
  if (!groupingSets.isEmpty()) {
    throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOMAPAGGR.getMsg());
  }
  // ////// 1. Generate ReduceSinkOperator
  // There is a special case when we want the rows to be randomly distributed
  // to
  // reducers for load balancing problem. That happens when there is no
  // DISTINCT
  // operator. We set the numPartitionColumns to -1 for this purpose. This is
  // captured by WritableComparableHiveObject.hashCode() function.
  ReduceSinkOperator reduceSinkOperatorInfo =
      genGroupByPlanReduceSinkOperator(qb,
          dest,
          input,
          grpByExprs,
          (parseInfo.getDistinctFuncExprsForClause(dest).isEmpty() ? -1 : Integer.MAX_VALUE),
          false,
          -1,
          false,
          false);
  // ////// 2. Generate GroupbyOperator
  // Evaluators created here are shared with the final stage below so both
  // stages use the same UDAF instances.
  Map<String, GenericUDAFEvaluator> genericUDAFEvaluators =
      new LinkedHashMap<String, GenericUDAFEvaluator>();
  GroupByOperator groupByOperatorInfo = (GroupByOperator) genGroupByPlanGroupByOperator(
      parseInfo, dest, reduceSinkOperatorInfo, reduceSinkOperatorInfo, GroupByDesc.Mode.PARTIAL1,
      genericUDAFEvaluators);
  // A global aggregation (no group-by keys) must end on a single reducer.
  int numReducers = -1;
  if (grpByExprs.isEmpty()) {
    numReducers = 1;
  }
  // ////// 3. Generate ReduceSinkOperator2
  Operator reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(
      parseInfo, dest, groupByOperatorInfo, grpByExprs.size(), numReducers, false);
  // ////// 4. Generate GroupbyOperator2
  Operator groupByOperatorInfo2 = genGroupByPlanGroupByOperator2MR(parseInfo,
      dest, reduceSinkOperatorInfo2, GroupByDesc.Mode.FINAL,
      genericUDAFEvaluators, false);
  return groupByOperatorInfo2;
}
/**
 * Returns true when the clause can be fully aggregated on the map side:
 * a global aggregate with no group-by keys and no DISTINCT aggregates.
 */
private boolean optimizeMapAggrGroupBy(String dest, QB qb) {
  List<ASTNode> grpByExprs = getGroupByForClause(qb.getParseInfo(), dest);
  if (grpByExprs != null && !grpByExprs.isEmpty()) {
    // Grouping keys present: partial results must still be shuffled.
    return false;
  }
  // Also require the absence of DISTINCT aggregates.
  return qb.getParseInfo().getDistinctFuncExprsForClause(dest).isEmpty();
}
/**
 * Recursively collects the names of all columns referenced by
 * {@code exprNode} into {@code colNamesExprs}.
 */
static private void extractColumns(Set<String> colNamesExprs,
    ExprNodeDesc exprNode) throws SemanticException {
  if (exprNode instanceof ExprNodeColumnDesc) {
    // Leaf: a column reference contributes its name.
    colNamesExprs.add(((ExprNodeColumnDesc) exprNode).getColumn());
  } else if (exprNode instanceof ExprNodeGenericFuncDesc) {
    // Function call: recurse into every argument.
    for (ExprNodeDesc child : ((ExprNodeGenericFuncDesc) exprNode).getChildren()) {
      extractColumns(colNamesExprs, child);
    }
  }
  // Constants and other leaf descriptors contribute nothing.
}
/**
 * Returns true if the two sets share at least one element.
 *
 * @param set1 first set of column names
 * @param set2 second set of column names
 * @return true if the intersection of the two sets is non-empty
 */
static private boolean hasCommonElement(Set<String> set1, Set<String> set2) {
  // Standard-library replacement for the hand-rolled contains() loop;
  // Collections.disjoint is true exactly when no element is shared.
  return !java.util.Collections.disjoint(set1, set2);
}
/**
 * Validates a query that uses grouping sets: a column may not appear both
 * as a group-by key and inside a DISTINCT expression or an aggregate
 * parameter.
 *
 * @param grpByExprs         group-by key expressions for the clause
 * @param distinctGrpByExprs DISTINCT aggregate expressions for the clause
 * @param aggregationTrees   aggregate expressions keyed by their text form
 * @param inputRowResolver   resolver used to build typed expression trees
 * @throws SemanticException if a group-by column also occurs in a distinct
 *         or aggregate expression, or if expression analysis fails
 */
private void checkExpressionsForGroupingSet(List<ASTNode> grpByExprs,
    List<ASTNode> distinctGrpByExprs,
    Map<String, ASTNode> aggregationTrees,
    RowResolver inputRowResolver) throws SemanticException {

  Set<String> colNamesGroupByExprs = new HashSet<String>();
  Set<String> colNamesGroupByDistinctExprs = new HashSet<String>();
  Set<String> colNamesAggregateParameters = new HashSet<String>();

  // The columns in the group by expressions should not intersect with the
  // columns in the distinct expressions.
  for (ASTNode grpByExpr : grpByExprs) {
    extractColumns(colNamesGroupByExprs, genExprNodeDesc(grpByExpr, inputRowResolver));
  }

  // If there are distinct expressions, accumulate their parameter columns
  // and check for overlap after each distinct expression is processed.
  if (!distinctGrpByExprs.isEmpty()) {
    for (ASTNode value : distinctGrpByExprs) {
      // Child 0 is the function name; the parameters follow.
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode parameter = (ASTNode) value.getChild(i);
        ExprNodeDesc distExprNode = genExprNodeDesc(parameter, inputRowResolver);
        extractColumns(colNamesGroupByDistinctExprs, distExprNode);
      }

      if (hasCommonElement(colNamesGroupByExprs, colNamesGroupByDistinctExprs)) {
        throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_EXPRESSION_INVALID.getMsg());
      }
    }
  }

  // Aggregate parameters must not reference group-by columns either.
  // FIX: removed an unused local ("aggParameters") that was allocated per
  // aggregate but never read.
  for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
    ASTNode value = entry.getValue();
    // Child 0 is the function name; the parameters follow.
    for (int i = 1; i < value.getChildCount(); i++) {
      ASTNode paraExpr = (ASTNode) value.getChild(i);
      ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRowResolver);
      extractColumns(colNamesAggregateParameters, paraExprNode);
    }

    if (hasCommonElement(colNamesGroupByExprs, colNamesAggregateParameters)) {
      throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_EXPRESSION_INVALID.getMsg());
    }
  }
}
/**
 * Generate a Group-By plan using 1 map-reduce job. First perform a map-side
 * partial aggregation (to reduce the amount of data); at this point of time,
 * we may turn off map-side partial aggregation based on its performance. Then
 * spray by the group by key, and sort by the distinct key (if any), and
 * compute aggregates based on actual aggregates.
 *
 * The aggregation evaluation functions are as follows:
 *
 * No grouping sets:
 * Group By Operator:
 * grouping keys: group by expressions if no DISTINCT
 * grouping keys: group by expressions + distinct keys if DISTINCT
 * Mapper: iterate/terminatePartial (mode = HASH)
 * Partitioning Key: grouping key
 * Sorting Key: grouping key if no DISTINCT
 * grouping + distinct key if DISTINCT
 * Reducer: iterate/terminate if DISTINCT
 * merge/terminate if NO DISTINCT (mode MERGEPARTIAL)
 *
 * Grouping Sets:
 * Group By Operator:
 * grouping keys: group by expressions + grouping id. if no DISTINCT
 * grouping keys: group by expressions + grouping id. + distinct keys if DISTINCT
 * Mapper: iterate/terminatePartial (mode = HASH)
 * Partitioning Key: grouping key + grouping id.
 * Sorting Key: grouping key + grouping id. if no DISTINCT
 * grouping + grouping id. + distinct key if DISTINCT
 * Reducer: iterate/terminate if DISTINCT
 * merge/terminate if NO DISTINCT (mode MERGEPARTIAL)
 *
 * Grouping Sets with an additional MR job introduced (distincts are not allowed):
 * Group By Operator:
 * grouping keys: group by expressions
 * Mapper: iterate/terminatePartial (mode = HASH)
 * Partitioning Key: grouping key
 * Sorting Key: grouping key
 * Reducer: merge/terminate (mode MERGEPARTIAL)
 * Group by Operator:
 * grouping keys: group by expressions + add a new grouping id. key
 *
 * STAGE 2
 * Partitioning Key: grouping key + grouping id.
 * Sorting Key: grouping key + grouping id.
 * Reducer: merge/terminate (mode = FINAL)
 * Group by Operator:
 * grouping keys: group by expressions + grouping id.
 *
 * @param dest the destination clause being planned
 * @param qb the query block for the clause
 * @param inputOperatorInfo the operator feeding this group-by
 * @return the root operator of the generated group-by plan
 * @throws SemanticException if DISTINCT is combined with grouping sets that
 *         need an additional MR job, or on expression analysis errors
 */
@SuppressWarnings("nls")
private Operator genGroupByPlanMapAggrNoSkew(String dest, QB qb,
    Operator inputOperatorInfo) throws SemanticException {

  QBParseInfo parseInfo = qb.getParseInfo();
  ObjectPair<List<ASTNode>, List<Integer>> grpByExprsGroupingSets =
      getGroupByGroupingSetsForClause(parseInfo, dest);

  List<ASTNode> grpByExprs = grpByExprsGroupingSets.getFirst();
  List<Integer> groupingSets = grpByExprsGroupingSets.getSecond();
  boolean groupingSetsPresent = !groupingSets.isEmpty();

  int newMRJobGroupingSetsThreshold =
      conf.getIntVar(HiveConf.ConfVars.HIVE_NEW_JOB_GROUPING_SET_CARDINALITY);

  if (groupingSetsPresent) {
    // Grouping-set columns must not also appear in distinct/aggregate exprs.
    checkExpressionsForGroupingSet(grpByExprs,
        parseInfo.getDistinctFuncExprsForClause(dest),
        parseInfo.getAggregationExprsForClause(dest),
        opParseCtx.get(inputOperatorInfo).getRowResolver());
  }

  // ////// Generate GroupbyOperator for a map-side partial aggregation
  Map<String, GenericUDAFEvaluator> genericUDAFEvaluators =
      new LinkedHashMap<String, GenericUDAFEvaluator>();

  // Is the grouping sets data consumed in the current MR job, or does it
  // need an additional MR job?
  // FIX: replaced the redundant "cond ? true : false" with the condition.
  boolean groupingSetsNeedAdditionalMRJob =
      groupingSetsPresent && groupingSets.size() > newMRJobGroupingSetsThreshold;

  GroupByOperator groupByOperatorInfo =
      (GroupByOperator) genGroupByPlanMapGroupByOperator(
          qb,
          dest,
          grpByExprs,
          inputOperatorInfo,
          GroupByDesc.Mode.HASH,
          genericUDAFEvaluators,
          groupingSets,
          groupingSetsPresent && !groupingSetsNeedAdditionalMRJob);

  groupOpToInputTables.put(groupByOperatorInfo, opParseCtx.get(
      inputOperatorInfo).getRowResolver().getTableNames());

  // Optimize the scenario when there are no grouping keys - only 1 reducer is
  // needed
  int numReducers = -1;
  if (grpByExprs.isEmpty()) {
    numReducers = 1;
  }

  // ////// Generate ReduceSink Operator
  boolean isDistinct = !qb.getParseInfo().getDistinctFuncExprsForClause(dest).isEmpty();

  // Distincts are not allowed with an additional mr job
  if (groupingSetsNeedAdditionalMRJob && isDistinct) {
    String errorMsg = "The number of rows per input row due to grouping sets is "
        + groupingSets.size();
    throw new SemanticException(
        ErrorMsg.HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_DISTINCTS.getMsg(errorMsg));
  }

  Operator reduceSinkOperatorInfo =
      genGroupByPlanReduceSinkOperator(qb,
          dest,
          groupByOperatorInfo,
          grpByExprs,
          grpByExprs.size(),
          true,
          numReducers,
          true,
          groupingSetsPresent && !groupingSetsNeedAdditionalMRJob);

  // Does it require a new MR job for grouping sets
  if (!groupingSetsPresent || !groupingSetsNeedAdditionalMRJob) {
    // This is a 1-stage map-reduce processing of the groupby. The map-side
    // aggregates were just used to reduce output data. In case of distincts,
    // partial results are not used, and so iterate is again invoked on the
    // reducer. In case of non-distincts, partial results are used, and merge
    // is invoked on the reducer.
    return genGroupByPlanGroupByOperator1(parseInfo, dest,
        reduceSinkOperatorInfo, GroupByDesc.Mode.MERGEPARTIAL,
        genericUDAFEvaluators, false,
        groupingSets, groupingSetsPresent, groupingSetsNeedAdditionalMRJob);
  } else {
    // Add 'n' rows corresponding to the grouping sets. For each row, create 'n' rows,
    // one for each grouping set key. Since map-side aggregation has already been performed,
    // the number of rows would have been reduced. Moreover, the rows corresponding to the
    // grouping keys come together, so there is a higher chance of finding the rows in the hash
    // table.
    Operator groupByOperatorInfo2 =
        genGroupByPlanGroupByOperator1(parseInfo, dest,
            reduceSinkOperatorInfo, GroupByDesc.Mode.PARTIALS,
            genericUDAFEvaluators, false,
            groupingSets, groupingSetsPresent, groupingSetsNeedAdditionalMRJob);

    // ////// Generate ReduceSinkOperator2
    Operator reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(
        parseInfo, dest, groupByOperatorInfo2, grpByExprs.size() + 1, numReducers,
        groupingSetsPresent);

    // ////// Generate GroupbyOperator3
    return genGroupByPlanGroupByOperator2MR(parseInfo, dest,
        reduceSinkOperatorInfo2, GroupByDesc.Mode.FINAL,
        genericUDAFEvaluators, groupingSetsPresent);
  }
}
/**
* Generate a Group-By plan using a 2 map-reduce jobs. However, only 1
* group-by plan is generated if the query involves no grouping key and no
* distincts. In that case, the plan is same as generated by
* genGroupByPlanMapAggr1MR. Otherwise, the following plan is generated: First
* perform a map side partial aggregation (to reduce the amount of data). Then
* spray by the grouping key and distinct key (or a random number, if no
* distinct is present) in hope of getting a uniform distribution, and compute
* partial aggregates grouped by the reduction key (grouping key + distinct
* key). Evaluate partial aggregates first, and spray by the grouping key to
* compute actual aggregates in the second phase.
*
* The aggregation evaluation functions are as follows:
*
* No grouping sets:
* STAGE 1
* Group by Operator:
* grouping keys: group by expressions if no DISTINCT
* grouping keys: group by expressions + distinct keys if DISTINCT
* Mapper: iterate/terminatePartial (mode = HASH)
* Partitioning Key: random() if no DISTINCT
* grouping + distinct key if DISTINCT
* Sorting Key: grouping key if no DISTINCT
* grouping + distinct key if DISTINCT
* Reducer: iterate/terminatePartial if DISTINCT
* merge/terminatePartial if NO DISTINCT (mode = MERGEPARTIAL)
* Group by Operator:
* grouping keys: group by expressions
*
* STAGE 2
* Partitioning Key: grouping key
* Sorting Key: grouping key
* Reducer: merge/terminate (mode = FINAL)
*
* In the presence of grouping sets, the aggregation evaluation functions are as follows:
* STAGE 1
* Group by Operator:
* grouping keys: group by expressions + grouping id. if no DISTINCT
 * grouping keys: group by expressions + grouping id. + distinct keys if DISTINCT
* Mapper: iterate/terminatePartial (mode = HASH)
* Partitioning Key: random() if no DISTINCT
* grouping + grouping id. + distinct key if DISTINCT
* Sorting Key: grouping key + grouping id. if no DISTINCT
* grouping + grouping id. + distinct key if DISTINCT
* Reducer: iterate/terminatePartial if DISTINCT
* merge/terminatePartial if NO DISTINCT (mode = MERGEPARTIAL)
* Group by Operator:
* grouping keys: group by expressions + grouping id.
*
* STAGE 2
* Partitioning Key: grouping key
* Sorting Key: grouping key + grouping id.
* Reducer: merge/terminate (mode = FINAL)
*/
@SuppressWarnings("nls")
private Operator genGroupByPlanMapAggr2MR(String dest, QB qb,
Operator inputOperatorInfo) throws SemanticException {
QBParseInfo parseInfo = qb.getParseInfo();
ObjectPair<List<ASTNode>, List<Integer>> grpByExprsGroupingSets =
getGroupByGroupingSetsForClause(parseInfo, dest);
List<ASTNode> grpByExprs = grpByExprsGroupingSets.getFirst();
List<Integer> groupingSets = grpByExprsGroupingSets.getSecond();
boolean groupingSetsPresent = !groupingSets.isEmpty();
if (groupingSetsPresent) {
// A column may not be both a grouping key and a distinct/aggregate input.
checkExpressionsForGroupingSet(grpByExprs,
parseInfo.getDistinctFuncExprsForClause(dest),
parseInfo.getAggregationExprsForClause(dest),
opParseCtx.get(inputOperatorInfo).getRowResolver());
int newMRJobGroupingSetsThreshold =
conf.getIntVar(HiveConf.ConfVars.HIVE_NEW_JOB_GROUPING_SET_CARDINALITY);
// Turn off skew if an additional MR job is required anyway for grouping sets.
// The skew (2-MR-job) path cannot absorb the extra stage, so this is an error.
if (groupingSets.size() > newMRJobGroupingSetsThreshold) {
String errorMsg = "The number of rows per input row due to grouping sets is "
+ groupingSets.size();
throw new SemanticException(
ErrorMsg.HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_SKEW.getMsg(errorMsg));
}
}
// ////// Generate GroupbyOperator for a map-side partial aggregation
Map<String, GenericUDAFEvaluator> genericUDAFEvaluators =
new LinkedHashMap<String, GenericUDAFEvaluator>();
GroupByOperator groupByOperatorInfo =
(GroupByOperator) genGroupByPlanMapGroupByOperator(
qb, dest, grpByExprs, inputOperatorInfo, GroupByDesc.Mode.HASH,
genericUDAFEvaluators, groupingSets, groupingSetsPresent);
groupOpToInputTables.put(groupByOperatorInfo, opParseCtx.get(
inputOperatorInfo).getRowResolver().getTableNames());
// Optimize the scenario when there are no grouping keys and no distinct - 2
// map-reduce jobs are not needed
// For eg: select count(1) from T where t.ds = ....
if (!optimizeMapAggrGroupBy(dest, qb)) {
// General case: spray by grouping + distinct key (or randomly when no
// distinct) and finish the aggregation in a second MR stage.
List<ASTNode> distinctFuncExprs = parseInfo.getDistinctFuncExprsForClause(dest);
// ////// Generate ReduceSink Operator
// numPartitionFields is -1 (random partitioning) when there is no
// DISTINCT, MAX_VALUE (all key fields) otherwise.
Operator reduceSinkOperatorInfo =
genGroupByPlanReduceSinkOperator(qb,
dest,
groupByOperatorInfo,
grpByExprs,
distinctFuncExprs.isEmpty() ? -1 : Integer.MAX_VALUE,
false,
-1,
true,
groupingSetsPresent);
// ////// Generate GroupbyOperator for a partial aggregation
Operator groupByOperatorInfo2 = genGroupByPlanGroupByOperator1(parseInfo,
dest, reduceSinkOperatorInfo, GroupByDesc.Mode.PARTIALS,
genericUDAFEvaluators, false,
groupingSets, groupingSetsPresent, false);
// With no grouping keys a single reducer can finish the aggregation.
int numReducers = -1;
if (grpByExprs.isEmpty()) {
numReducers = 1;
}
// ////// Generate ReduceSinkOperator2
Operator reduceSinkOperatorInfo2 = genGroupByPlanReduceSinkOperator2MR(
parseInfo, dest, groupByOperatorInfo2, grpByExprs.size(), numReducers,
groupingSetsPresent);
// ////// Generate GroupbyOperator3
return genGroupByPlanGroupByOperator2MR(parseInfo, dest,
reduceSinkOperatorInfo2, GroupByDesc.Mode.FINAL,
genericUDAFEvaluators, groupingSetsPresent);
} else {
// If there are no grouping keys, grouping sets cannot be present
assert !groupingSetsPresent;
// No grouping keys and no distinct: one MR stage with a single reducer
// completes the aggregation (same shape as genGroupByPlanMapAggr1MR).
// ////// Generate ReduceSink Operator
Operator reduceSinkOperatorInfo =
genGroupByPlanReduceSinkOperator(qb,
dest,
groupByOperatorInfo,
grpByExprs,
grpByExprs.size(),
false,
1,
true,
groupingSetsPresent);
return genGroupByPlanGroupByOperator2MR(parseInfo, dest,
reduceSinkOperatorInfo, GroupByDesc.Mode.FINAL, genericUDAFEvaluators, false);
}
}
/**
 * Historical hook for inserting type-conversion operators in front of a
 * table or partition sink. The destination metadata is still looked up —
 * presumably retained for validation side effects (TODO confirm) — but no
 * extra operator is created any more, so the input operator is always
 * returned unchanged.
 *
 * @param dest destination clause name
 * @param qb query block whose metadata describes the destination
 * @param input current top operator for the destination
 * @return the input operator, unchanged
 */
@SuppressWarnings("nls")
private Operator genConversionOps(String dest, QB qb, Operator input)
    throws SemanticException {
  int destType = qb.getMetaData().getDestTypeForAlias(dest).intValue();
  if (destType == QBMetaData.DEST_TABLE) {
    qb.getMetaData().getDestTableForAlias(dest);
  } else if (destType == QBMetaData.DEST_PARTITION) {
    qb.getMetaData().getDestPartitionForAlias(dest).getTable();
  }
  return input;
}
/**
 * Finds a reducer count no greater than {@code maxReducers} that divides
 * {@code totalFiles} evenly, by growing the files-per-reducer ratio until it
 * becomes an exact divisor of the total.
 *
 * @param totalFiles total number of output files (bucket count)
 * @param maxReducers upper bound on the number of reducers
 * @return a divisor-based reducer count, totalFiles / filesPerReducer
 */
private int getReducersBucketing(int totalFiles, int maxReducers) {
  int filesPerReducer = (int) Math.ceil((double) totalFiles / (double) maxReducers);
  while (totalFiles % filesPerReducer != 0) {
    filesPerReducer++;
  }
  return totalFiles / filesPerReducer;
}
/**
 * Holder for the reduce-sink settings computed while planning a bucketed
 * and/or sorted insert: the partition columns for the sink plus the
 * multi-file-spray bookkeeping (files per reducer and total files).
 */
private static class SortBucketRSCtx {
  // Partition columns for the reduce sink; null until computed.
  ArrayList<ExprNodeDesc> partnCols = null;
  // True when each reducer writes more than one output file
  // (more buckets than reducers).
  boolean multiFileSpray = false;
  // Number of files written by each reducer.
  int numFiles = 1;
  // Total number of files written for the destination.
  int totalFiles = 1;

  public SortBucketRSCtx() {
  }

  /** @return the partnCols */
  public ArrayList<ExprNodeDesc> getPartnCols() {
    return partnCols;
  }

  /** @param partnCols the partnCols to set */
  public void setPartnCols(ArrayList<ExprNodeDesc> partnCols) {
    this.partnCols = partnCols;
  }

  /** @return the multiFileSpray */
  public boolean isMultiFileSpray() {
    return multiFileSpray;
  }

  /** @param multiFileSpray the multiFileSpray to set */
  public void setMultiFileSpray(boolean multiFileSpray) {
    this.multiFileSpray = multiFileSpray;
  }

  /** @return the numFiles */
  public int getNumFiles() {
    return numFiles;
  }

  /** @param numFiles the numFiles to set */
  public void setNumFiles(int numFiles) {
    this.numFiles = numFiles;
  }

  /** @return the totalFiles */
  public int getTotalFiles() {
    return totalFiles;
  }

  /** @param totalFiles the totalFiles to set */
  public void setTotalFiles(int totalFiles) {
    this.totalFiles = totalFiles;
  }
}
/**
 * If the destination table declares buckets and/or sort columns and the
 * corresponding "enforce" settings are enabled, inserts a ReduceSink in
 * front of the file sink so rows are partitioned by the bucketing columns
 * and sorted as required, and records the multi-file-spray settings in
 * {@code ctx}.
 *
 * @param dest destination clause name
 * @param input current top operator for the destination
 * @param qb query block being compiled
 * @param table_desc descriptor of the destination table
 * @param dest_tab the destination table
 * @param ctx out-parameter receiving partition columns and file counts
 * @return the input operator, or the new ReduceSink when one was added
 * @throws SemanticException on errors resolving bucket/sort columns
 */
@SuppressWarnings("nls")
private Operator genBucketingSortingDest(String dest, Operator input, QB qb,
TableDesc table_desc, Table dest_tab, SortBucketRSCtx ctx) throws SemanticException {
// If the table is bucketed, and bucketing is enforced, do the following:
// If the number of buckets is smaller than the number of maximum reducers,
// create those many reducers.
// If not, create a multiFileSink instead of FileSink - the multiFileSink will
// spray the data into multiple buckets. That way, we can support a very large
// number of buckets without needing a very large number of reducers.
boolean enforceBucketing = false;
boolean enforceSorting = false;
ArrayList<ExprNodeDesc> partnCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> partnColsNoConvert = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> sortCols = new ArrayList<ExprNodeDesc>();
ArrayList<Integer> sortOrders = new ArrayList<Integer>();
boolean multiFileSpray = false;
int numFiles = 1;
int totalFiles = 1;
if ((dest_tab.getNumBuckets() > 0) &&
(conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCEBUCKETING))) {
enforceBucketing = true;
partnCols = getPartitionColsFromBucketCols(dest, qb, dest_tab, table_desc, input, true);
partnColsNoConvert = getPartitionColsFromBucketCols(dest, qb, dest_tab, table_desc, input,
false);
}
if ((dest_tab.getSortCols() != null) &&
(dest_tab.getSortCols().size() > 0) &&
(conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCESORTING))) {
enforceSorting = true;
sortCols = getSortCols(dest, qb, dest_tab, table_desc, input, true);
sortOrders = getSortOrders(dest, qb, dest_tab, input);
if (!enforceBucketing) {
// No bucketing: partition by the sort columns instead.
partnCols = sortCols;
partnColsNoConvert = getSortCols(dest, qb, dest_tab, table_desc, input, false);
}
}
if (enforceBucketing || enforceSorting) {
int maxReducers = conf.getIntVar(HiveConf.ConfVars.MAXREDUCERS);
// An explicitly configured reducer count overrides the cap.
if (conf.getIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS) > 0) {
maxReducers = conf.getIntVar(HiveConf.ConfVars.HADOOPNUMREDUCERS);
}
int numBuckets = dest_tab.getNumBuckets();
if (numBuckets > maxReducers) {
// More buckets than reducers: each reducer sprays into several files.
multiFileSpray = true;
totalFiles = numBuckets;
if (totalFiles % maxReducers == 0) {
numFiles = totalFiles / maxReducers;
}
else {
// find the number of reducers such that it is a divisor of totalFiles
maxReducers = getReducersBucketing(totalFiles, maxReducers);
numFiles = totalFiles / maxReducers;
}
}
else {
// One file per bucket, one reducer per bucket.
maxReducers = numBuckets;
}
input = genReduceSinkPlanForSortingBucketing(dest_tab, input,
sortCols, sortOrders, partnCols, maxReducers);
ctx.setMultiFileSpray(multiFileSpray);
ctx.setNumFiles(numFiles);
ctx.setPartnCols(partnColsNoConvert);
ctx.setTotalFiles(totalFiles);
}
return input;
}
/**
 * Check for HOLD_DDLTIME hint.
 *
 * @param qb query block whose hint list is scanned
 * @return true if HOLD_DDLTIME is set, false otherwise.
 */
private boolean checkHoldDDLTime(QB qb) {
  ASTNode hints = qb.getParseInfo().getHints();
  if (hints == null) {
    return false;
  }
  int hintCount = hints.getChildCount();
  for (int idx = 0; idx < hintCount; idx++) {
    ASTNode hint = (ASTNode) hints.getChild(idx);
    ASTNode hintName = (ASTNode) hint.getChild(0);
    if (hintName.getToken().getType() == HiveParser.TOK_HOLD_DDLTIME) {
      return true;
    }
  }
  return false;
}
@SuppressWarnings("nls")
private Operator genFileSinkPlan(String dest, QB qb, Operator input)
throws SemanticException {
RowResolver inputRR = opParseCtx.get(input).getRowResolver();
QBMetaData qbm = qb.getMetaData();
Integer dest_type = qbm.getDestTypeForAlias(dest);
Table dest_tab = null; // destination table if any
Partition dest_part = null;// destination partition if any
Path queryTmpdir = null; // the intermediate destination directory
Path dest_path = null; // the final destination directory
TableDesc table_desc = null;
int currentTableId = 0;
boolean isLocal = false;
SortBucketRSCtx rsCtx = new SortBucketRSCtx();
DynamicPartitionCtx dpCtx = null;
LoadTableDesc ltd = null;
boolean holdDDLTime = checkHoldDDLTime(qb);
ListBucketingCtx lbCtx = null;
switch (dest_type.intValue()) {
case QBMetaData.DEST_TABLE: {
dest_tab = qbm.getDestTableForAlias(dest);
// Is the user trying to insert into a external tables
if ((!conf.getBoolVar(HiveConf.ConfVars.HIVE_INSERT_INTO_EXTERNAL_TABLES)) &&
(dest_tab.getTableType().equals(TableType.EXTERNAL_TABLE))) {
throw new SemanticException(
ErrorMsg.INSERT_EXTERNAL_TABLE.getMsg(dest_tab.getTableName()));
}
Map<String, String> partSpec = qbm.getPartSpecForAlias(dest);
dest_path = dest_tab.getPath();
// If the query here is an INSERT_INTO and the target is an immutable table,
// verify that our destination is empty before proceeding
if (dest_tab.isImmutable() &&
qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),dest_tab.getTableName())){
try {
FileSystem fs = dest_path.getFileSystem(conf);
if (! MetaStoreUtils.isDirEmpty(fs,dest_path)){
LOG.warn("Attempted write into an immutable table : "
+ dest_tab.getTableName() + " : " + dest_path);
throw new SemanticException(
ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(dest_tab.getTableName()));
}
} catch (IOException ioe) {
LOG.warn("Error while trying to determine if immutable table has any data : "
+ dest_tab.getTableName() + " : " + dest_path);
throw new SemanticException(ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(ioe.getMessage()));
}
}
// check for partition
List<FieldSchema> parts = dest_tab.getPartitionKeys();
if (parts != null && parts.size() > 0) { // table is partitioned
if (partSpec == null || partSpec.size() == 0) { // user did NOT specify partition
throw new SemanticException(generateErrorMessage(
qb.getParseInfo().getDestForClause(dest),
ErrorMsg.NEED_PARTITION_ERROR.getMsg()));
}
// the HOLD_DDLTIME hint should not be used with dynamic partition since the
// newly generated partitions should always update their DDLTIME
if (holdDDLTime) {
throw new SemanticException(generateErrorMessage(
qb.getParseInfo().getDestForClause(dest),
ErrorMsg.HOLD_DDLTIME_ON_NONEXIST_PARTITIONS.getMsg()));
}
dpCtx = qbm.getDPCtx(dest);
if (dpCtx == null) {
dest_tab.validatePartColumnNames(partSpec, false);
dpCtx = new DynamicPartitionCtx(dest_tab, partSpec,
conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME),
conf.getIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTSPERNODE));
qbm.setDPCtx(dest, dpCtx);
}
if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING)) { // allow DP
throw new SemanticException(generateErrorMessage(
qb.getParseInfo().getDestForClause(dest),
ErrorMsg.DYNAMIC_PARTITION_DISABLED.getMsg()));
}
if (dpCtx.getSPPath() != null) {
dest_path = new Path(dest_tab.getPath(), dpCtx.getSPPath());
}
if ((dest_tab.getNumBuckets() > 0) &&
(conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCEBUCKETING))) {
dpCtx.setNumBuckets(dest_tab.getNumBuckets());
}
}
boolean isNonNativeTable = dest_tab.isNonNative();
if (isNonNativeTable) {
queryTmpdir = dest_path;
} else {
// if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
// to final /user/hive/warehouse/ will fail later, so instead pick tmp dir
// on same namespace as tbl dir.
queryTmpdir = dest_path.toUri().getScheme().equals("viewfs") ?
ctx.getExtTmpPathRelTo(dest_path.getParent().toUri()) :
ctx.getExternalTmpPath(dest_path.toUri());
}
if (dpCtx != null) {
// set the root of the temporary path where dynamic partition columns will populate
dpCtx.setRootPath(queryTmpdir);
}
// this table_desc does not contain the partitioning columns
table_desc = Utilities.getTableDesc(dest_tab);
// Add sorting/bucketing if needed
input = genBucketingSortingDest(dest, input, qb, table_desc, dest_tab, rsCtx);
idToTableNameMap.put(String.valueOf(destTableId), dest_tab.getTableName());
currentTableId = destTableId;
destTableId++;
lbCtx = constructListBucketingCtx(dest_tab.getSkewedColNames(),
dest_tab.getSkewedColValues(), dest_tab.getSkewedColValueLocationMaps(),
dest_tab.isStoredAsSubDirectories(), conf);
// Create the work for moving the table
// NOTE: specify Dynamic partitions in dest_tab for WriteEntity
if (!isNonNativeTable) {
ltd = new LoadTableDesc(queryTmpdir,table_desc, dpCtx);
ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
dest_tab.getTableName()));
ltd.setLbCtx(lbCtx);
if (holdDDLTime) {
LOG.info("this query will not update transient_lastDdlTime!");
ltd.setHoldDDLTime(true);
}
loadTableWork.add(ltd);
}
WriteEntity output = null;
// Here only register the whole table for post-exec hook if no DP present
// in the case of DP, we will register WriteEntity in MoveTask when the
// list of dynamically created partitions are known.
if ((dpCtx == null || dpCtx.getNumDPCols() == 0)) {
output = new WriteEntity(dest_tab, determineWriteType(ltd, isNonNativeTable));
if (!outputs.add(output)) {
throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
.getMsg(dest_tab.getTableName()));
}
}
if ((dpCtx != null) && (dpCtx.getNumDPCols() >= 0)) {
// No static partition specified
if (dpCtx.getNumSPCols() == 0) {
output = new WriteEntity(dest_tab, determineWriteType(ltd, isNonNativeTable), false);
outputs.add(output);
}
// part of the partition specified
// Create a DummyPartition in this case. Since, the metastore does not store partial
// partitions currently, we need to store dummy partitions
else {
try {
String ppath = dpCtx.getSPPath();
ppath = ppath.substring(0, ppath.length() - 1);
DummyPartition p =
new DummyPartition(dest_tab, dest_tab.getDbName()
+ "@" + dest_tab.getTableName() + "@" + ppath,
partSpec);
output = new WriteEntity(p, WriteEntity.WriteType.INSERT, false);
outputs.add(output);
} catch (HiveException e) {
throw new SemanticException(e.getMessage(), e);
}
}
}
ctx.getLoadTableOutputMap().put(ltd, output);
break;
}
case QBMetaData.DEST_PARTITION: {
dest_part = qbm.getDestPartitionForAlias(dest);
dest_tab = dest_part.getTable();
if ((!conf.getBoolVar(HiveConf.ConfVars.HIVE_INSERT_INTO_EXTERNAL_TABLES)) &&
dest_tab.getTableType().equals(TableType.EXTERNAL_TABLE)) {
throw new SemanticException(
ErrorMsg.INSERT_EXTERNAL_TABLE.getMsg(dest_tab.getTableName()));
}
Path tabPath = dest_tab.getPath();
Path partPath = dest_part.getDataLocation();
// If the query here is an INSERT_INTO and the target is an immutable table,
// verify that our destination is empty before proceeding
if (dest_tab.isImmutable() &&
qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),dest_tab.getTableName())){
qb.getParseInfo().isInsertToTable();
try {
FileSystem fs = partPath.getFileSystem(conf);
if (! MetaStoreUtils.isDirEmpty(fs,partPath)){
LOG.warn("Attempted write into an immutable table partition : "
+ dest_tab.getTableName() + " : " + partPath);
throw new SemanticException(
ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(dest_tab.getTableName()));
}
} catch (IOException ioe) {
LOG.warn("Error while trying to determine if immutable table partition has any data : "
+ dest_tab.getTableName() + " : " + partPath);
throw new SemanticException(ErrorMsg.INSERT_INTO_IMMUTABLE_TABLE.getMsg(ioe.getMessage()));
}
}
// if the table is in a different dfs than the partition,
// replace the partition's dfs with the table's dfs.
dest_path = new Path(tabPath.toUri().getScheme(), tabPath.toUri()
.getAuthority(), partPath.toUri().getPath());
// if we are on viewfs we don't want to use /tmp as tmp dir since rename from /tmp/..
// to final /user/hive/warehouse/ will fail later, so instead pick tmp dir
// on same namespace as tbl dir.
queryTmpdir = dest_path.toUri().getScheme().equals("viewfs") ?
ctx.getExtTmpPathRelTo(dest_path.getParent().toUri()) :
ctx.getExternalTmpPath(dest_path.toUri());
table_desc = Utilities.getTableDesc(dest_tab);
// Add sorting/bucketing if needed
input = genBucketingSortingDest(dest, input, qb, table_desc, dest_tab, rsCtx);
idToTableNameMap.put(String.valueOf(destTableId), dest_tab.getTableName());
currentTableId = destTableId;
destTableId++;
lbCtx = constructListBucketingCtx(dest_part.getSkewedColNames(),
dest_part.getSkewedColValues(), dest_part.getSkewedColValueLocationMaps(),
dest_part.isStoredAsSubDirectories(), conf);
ltd = new LoadTableDesc(queryTmpdir, table_desc, dest_part.getSpec());
ltd.setReplace(!qb.getParseInfo().isInsertIntoTable(dest_tab.getDbName(),
dest_tab.getTableName()));
ltd.setLbCtx(lbCtx);
if (holdDDLTime) {
try {
Partition part = db.getPartition(dest_tab, dest_part.getSpec(), false);
if (part == null) {
throw new SemanticException(generateErrorMessage(
qb.getParseInfo().getDestForClause(dest),
ErrorMsg.HOLD_DDLTIME_ON_NONEXIST_PARTITIONS.getMsg()));
}
} catch (HiveException e) {
throw new SemanticException(e);
}
LOG.info("this query will not update transient_lastDdlTime!");
ltd.setHoldDDLTime(true);
}
loadTableWork.add(ltd);
if (!outputs.add(new WriteEntity(dest_part, (ltd.getReplace() ?
WriteEntity.WriteType.INSERT_OVERWRITE :
WriteEntity.WriteType.INSERT)))) {
throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
.getMsg(dest_tab.getTableName() + "@" + dest_part.getName()));
}
break;
}
case QBMetaData.DEST_LOCAL_FILE:
isLocal = true;
// fall through
case QBMetaData.DEST_DFS_FILE: {
dest_path = new Path(qbm.getDestFileForAlias(dest));
if (isLocal) {
// for local directory - we always write to map-red intermediate
// store and then copy to local fs
queryTmpdir = ctx.getMRTmpPath();
} else {
// otherwise write to the file system implied by the directory
// no copy is required. we may want to revisit this policy in future
try {
Path qPath = FileUtils.makeQualified(dest_path, conf);
queryTmpdir = ctx.getExternalTmpPath(qPath.toUri());
} catch (Exception e) {
throw new SemanticException("Error creating temporary folder on: "
+ dest_path, e);
}
}
String cols = "";
String colTypes = "";
ArrayList<ColumnInfo> colInfos = inputRR.getColumnInfos();
// CTAS case: the file output format and serde are defined by the create
// table command
// rather than taking the default value
List<FieldSchema> field_schemas = null;
CreateTableDesc tblDesc = qb.getTableDesc();
if (tblDesc != null) {
field_schemas = new ArrayList<FieldSchema>();
}
boolean first = true;
for (ColumnInfo colInfo : colInfos) {
String[] nm = inputRR.reverseLookup(colInfo.getInternalName());
if (nm[1] != null) { // non-null column alias
colInfo.setAlias(nm[1]);
}
String colName = colInfo.getInternalName(); //default column name
if (field_schemas != null) {
FieldSchema col = new FieldSchema();
if (!("".equals(nm[0])) && nm[1] != null) {
colName = unescapeIdentifier(colInfo.getAlias()).toLowerCase(); // remove ``
}
col.setName(colName);;
col.setType(colInfo.getType().getTypeName());
field_schemas.add(col);
}
if (!first) {
cols = cols.concat(",");
colTypes = colTypes.concat(":");
}
first = false;
cols = cols.concat(colName);
// Replace VOID type with string when the output is a temp table or
// local files.
// A VOID type can be generated under the query:
//
// select NULL from tt;
// or
// insert overwrite local directory "abc" select NULL from tt;
//
// where there is no column type to which the NULL value should be
// converted.
//
String tName = colInfo.getType().getTypeName();
if (tName.equals(serdeConstants.VOID_TYPE_NAME)) {
colTypes = colTypes.concat(serdeConstants.STRING_TYPE_NAME);
} else {
colTypes = colTypes.concat(tName);
}
}
// update the create table descriptor with the resulting schema.
if (tblDesc != null) {
tblDesc.setCols(new ArrayList<FieldSchema>(field_schemas));
}
boolean isDestTempFile = true;
if (!ctx.isMRTmpFileURI(dest_path.toUri().toString())) {
idToTableNameMap.put(String.valueOf(destTableId), dest_path.toUri().toString());
currentTableId = destTableId;
destTableId++;
isDestTempFile = false;
}
boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
loadFileWork.add(new LoadFileDesc(tblDesc, queryTmpdir, dest_path, isDfsDir, cols,
colTypes));
if (tblDesc == null) {
if (qb.getIsQuery()) {
String fileFormat = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYRESULTFILEFORMAT);
table_desc = PlanUtils.getDefaultQueryOutputTableDesc(cols, colTypes, fileFormat);
} else {
table_desc = PlanUtils.getDefaultTableDesc(qb.getLLocalDirectoryDesc(), cols, colTypes);
}
} else {
table_desc = PlanUtils.getTableDesc(tblDesc, cols, colTypes);
}
if (!outputs.add(new WriteEntity(dest_path, !isDfsDir, isDestTempFile))) {
throw new SemanticException(ErrorMsg.OUTPUT_SPECIFIED_MULTIPLE_TIMES
.getMsg(dest_path.toUri().toString()));
}
break;
}
default:
throw new SemanticException("Unknown destination type: " + dest_type);
}
input = genConversionSelectOperator(dest, qb, input, table_desc, dpCtx);
inputRR = opParseCtx.get(input).getRowResolver();
ArrayList<ColumnInfo> vecCol = new ArrayList<ColumnInfo>();
try {
StructObjectInspector rowObjectInspector = (StructObjectInspector) table_desc
.getDeserializer().getObjectInspector();
List<? extends StructField> fields = rowObjectInspector
.getAllStructFieldRefs();
for (int i = 0; i < fields.size(); i++) {
vecCol.add(new ColumnInfo(fields.get(i).getFieldName(), TypeInfoUtils
.getTypeInfoFromObjectInspector(fields.get(i)
.getFieldObjectInspector()), "", false));
}
} catch (Exception e) {
throw new SemanticException(e.getMessage(), e);
}
RowSchema fsRS = new RowSchema(vecCol);
// The output files of a FileSink can be merged if they are either not being written to a table
// or are being written to a table which is either not bucketed or enforce bucketing is not set
// and table the table is either not sorted or enforce sorting is not set
boolean canBeMerged = (dest_tab == null || !((dest_tab.getNumBuckets() > 0 &&
conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCEBUCKETING)) ||
(dest_tab.getSortCols() != null && dest_tab.getSortCols().size() > 0 &&
conf.getBoolVar(HiveConf.ConfVars.HIVEENFORCESORTING))));
FileSinkDesc fileSinkDesc = new FileSinkDesc(
queryTmpdir,
table_desc,
conf.getBoolVar(HiveConf.ConfVars.COMPRESSRESULT),
currentTableId,
rsCtx.isMultiFileSpray(),
canBeMerged,
rsCtx.getNumFiles(),
rsCtx.getTotalFiles(),
rsCtx.getPartnCols(),
dpCtx);
/* Set List Bucketing context. */
if (lbCtx != null) {
lbCtx.processRowSkewedIndex(fsRS);
lbCtx.calculateSkewedValueSubDirList();
}
fileSinkDesc.setLbCtx(lbCtx);
// set it in plan instead of runtime in FileSinkOperator
fileSinkDesc.setStatsCollectRawDataSize(HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_STATS_COLLECT_RAWDATASIZE));
// set the stats publishing/aggregating key prefix
// the same as directory name. The directory name
// can be changed in the optimizer but the key should not be changed
// it should be the same as the MoveWork's sourceDir.
fileSinkDesc.setStatsAggPrefix(fileSinkDesc.getDirName().toString());
if (HiveConf.getVar(conf, HIVESTATSDBCLASS).equalsIgnoreCase(StatDB.fs.name())) {
String statsTmpLoc = ctx.getExternalTmpPath(queryTmpdir.toUri()).toString();
LOG.info("Set stats collection dir : " + statsTmpLoc);
conf.set(StatsSetupConst.STATS_TMP_LOC, statsTmpLoc);
}
if (dest_part != null) {
try {
String staticSpec = Warehouse.makePartPath(dest_part.getSpec());
fileSinkDesc.setStaticSpec(staticSpec);
} catch (MetaException e) {
throw new SemanticException(e);
}
} else if (dpCtx != null) {
fileSinkDesc.setStaticSpec(dpCtx.getSPPath());
}
Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(fileSinkDesc,
fsRS, input), inputRR);
if (ltd != null && SessionState.get() != null) {
SessionState.get().getLineageState()
.mapDirToFop(ltd.getSourcePath(), (FileSinkOperator) output);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Created FileSink Plan for clause: " + dest + "dest_path: "
+ dest_path + " row schema: " + inputRR.toString());
}
fsopToTable.put((FileSinkOperator) output, dest_tab);
return output;
}
/**
 * Generate the conversion SelectOperator that converts the columns into the
 * types that are expected by the table_desc.
 *
 * @param dest destination clause name, used only for error messages
 * @param qb the query block being compiled
 * @param input operator producing the rows to be written to the destination
 * @param table_desc descriptor of the target table; its deserializer defines
 *        the expected column types
 * @param dpCtx dynamic-partition context, or null when not inserting with
 *        dynamic partitions
 * @return the input operator unchanged when no conversion is required,
 *         otherwise a new SelectOperator that casts columns to table types
 * @throws SemanticException on column-count mismatch or an impossible cast
 */
Operator genConversionSelectOperator(String dest, QB qb, Operator input,
    TableDesc table_desc, DynamicPartitionCtx dpCtx) throws SemanticException {
  // Instantiate the table's deserializer to learn the schema the table expects.
  StructObjectInspector oi = null;
  try {
    Deserializer deserializer = table_desc.getDeserializerClass()
        .newInstance();
    SerDeUtils.initializeSerDe(deserializer, conf, table_desc.getProperties(), null);
    oi = (StructObjectInspector) deserializer.getObjectInspector();
  } catch (Exception e) {
    throw new SemanticException(e);
  }

  // Check column number
  List<? extends StructField> tableFields = oi.getAllStructFieldRefs();
  boolean dynPart = HiveConf.getBoolVar(conf, HiveConf.ConfVars.DYNAMICPARTITIONING);
  ArrayList<ColumnInfo> rowFields = opParseCtx.get(input).getRowResolver()
      .getColumnInfos();
  int inColumnCnt = rowFields.size();
  int outColumnCnt = tableFields.size();
  // Dynamic-partition columns are appended after the regular table columns,
  // so the query must supply them in addition to the table's own columns.
  if (dynPart && dpCtx != null) {
    outColumnCnt += dpCtx.getNumDPCols();
  }

  if (inColumnCnt != outColumnCnt) {
    String reason = "Table " + dest + " has " + outColumnCnt
        + " columns, but query has " + inColumnCnt + " columns.";
    throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH.getMsg(
        qb.getParseInfo().getDestForClause(dest), reason));
  } else if (dynPart && dpCtx != null) {
    // create the mapping from input ExprNode to dest table DP column
    dpCtx.mapInputToDP(rowFields.subList(tableFields.size(), rowFields.size()));
  }

  // Check column types
  boolean converted = false;
  int columnNumber = tableFields.size();
  ArrayList<ExprNodeDesc> expressions = new ArrayList<ExprNodeDesc>(
      columnNumber);
  // MetadataTypedColumnsetSerDe does not need type conversions because it
  // does the conversion to String by itself.
  boolean isMetaDataSerDe = table_desc.getDeserializerClass().equals(
      MetadataTypedColumnsetSerDe.class);
  boolean isLazySimpleSerDe = table_desc.getDeserializerClass().equals(
      LazySimpleSerDe.class);
  if (!isMetaDataSerDe) {
    // here only deals with non-partition columns. We deal with partition columns next
    for (int i = 0; i < columnNumber; i++) {
      ObjectInspector tableFieldOI = tableFields.get(i)
          .getFieldObjectInspector();
      TypeInfo tableFieldTypeInfo = TypeInfoUtils
          .getTypeInfoFromObjectInspector(tableFieldOI);
      TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
      ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
          rowFields.get(i).getInternalName(), "", false, rowFields.get(i).isSkewedCol());
      // LazySimpleSerDe can convert any types to String type using
      // JSON-format. Only insert a cast when the types differ AND the
      // LazySimpleSerDe-to-string shortcut does not apply.
      if (!tableFieldTypeInfo.equals(rowFieldTypeInfo)
          && !(isLazySimpleSerDe
          && tableFieldTypeInfo.getCategory().equals(Category.PRIMITIVE) && tableFieldTypeInfo
          .equals(TypeInfoFactory.stringTypeInfo))) {
        // need to do some conversions here
        converted = true;
        if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
          // cannot convert to complex types
          column = null;
        } else {
          column = ParseUtils.createConversionCast(
              column, (PrimitiveTypeInfo)tableFieldTypeInfo);
        }
        if (column == null) {
          String reason = "Cannot convert column " + i + " from "
              + rowFieldTypeInfo + " to " + tableFieldTypeInfo + ".";
          throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH
              .getMsg(qb.getParseInfo().getDestForClause(dest), reason));
        }
      }
      expressions.add(column);
    }
  }

  // deal with dynamic partition columns: convert ExprNodeDesc type to String??
  if (dynPart && dpCtx != null && dpCtx.getNumDPCols() > 0) {
    // DP columns starts with tableFields.size()
    for (int i = tableFields.size(); i < rowFields.size(); ++i) {
      TypeInfo rowFieldTypeInfo = rowFields.get(i).getType();
      ExprNodeDesc column = new ExprNodeColumnDesc(
          rowFieldTypeInfo, rowFields.get(i).getInternalName(), "", false);
      expressions.add(column);
    }
    // converted = true; // [TODO]: should we check & convert type to String and set it to true?
  }

  if (converted) {
    // add the select operator
    RowResolver rowResolver = new RowResolver();
    ArrayList<String> colName = new ArrayList<String>();
    for (int i = 0; i < expressions.size(); i++) {
      String name = getColumnInternalName(i);
      rowResolver.put("", name, new ColumnInfo(name, expressions.get(i)
          .getTypeInfo(), "", false));
      colName.add(name);
    }
    Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
        new SelectDesc(expressions, colName), new RowSchema(rowResolver
        .getColumnInfos()), input), rowResolver);
    return output;
  } else {
    // not converted
    return input;
  }
}
@SuppressWarnings("nls")
private Operator genLimitPlan(String dest, QB qb, Operator input, int limit)
    throws SemanticException {
  // A map-only job can be optimized - instead of converting it to a
  // map-reduce job, we can have another map
  // job to do the same to avoid the cost of sorting in the map-reduce phase.
  // A better approach would be to
  // write into a local file and then have a map-only job.
  // Add the limit operator to get the value fields

  // Resolve the row schema of the operator we are limiting.
  RowResolver rr = opParseCtx.get(input).getRowResolver();

  // Build the LIMIT descriptor and record it so the global-limit
  // optimization can later adjust the last reduce-side limit.
  LimitDesc desc = new LimitDesc(limit);
  globalLimitCtx.setLastReduceLimitDesc(desc);

  Operator child = OperatorFactory.getAndMakeChild(
      desc, new RowSchema(rr.getColumnInfos()), input);
  Operator limitOp = putOpInsertMap(child, rr);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Created LimitOperator Plan for clause: " + dest
        + " row schema: " + rr.toString());
  }

  return limitOp;
}
/**
 * Generates the plan for a UDTF (table-generating function). Validates that
 * no incompatible clauses are present, initializes the UDTF with the input
 * row's object inspector, and wires a UDTFOperator into the DAG whose row
 * resolver is derived from the UDTF's output object inspector.
 *
 * @param genericUDTF the table-generating function to plan
 * @param outputTableAlias alias used for the UDTF's output columns
 * @param colAliases user-supplied column aliases; populated in place from the
 *        UDTF's output schema when empty
 * @param qb the query block being compiled
 * @param input operator feeding rows into the UDTF
 * @param outerLV whether this is an OUTER lateral view
 * @return the new UDTFOperator
 * @throws SemanticException if a disallowed clause is present or the alias
 *         count does not match the UDTF's output column count
 */
private Operator genUDTFPlan(GenericUDTF genericUDTF,
    String outputTableAlias, ArrayList<String> colAliases, QB qb,
    Operator input, boolean outerLV) throws SemanticException {
  // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY
  QBParseInfo qbp = qb.getParseInfo();
  if (!qbp.getDestToGroupBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_GROUP_BY.getMsg());
  }
  if (!qbp.getDestToDistributeBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_DISTRIBUTE_BY.getMsg());
  }
  if (!qbp.getDestToSortBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_SORT_BY.getMsg());
  }
  if (!qbp.getDestToClusterBy().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg());
  }
  if (!qbp.getAliasToLateralViews().isEmpty()) {
    throw new SemanticException(ErrorMsg.UDTF_LATERAL_VIEW.getMsg());
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Table alias: " + outputTableAlias + " Col aliases: "
        + colAliases);
  }
  // Use the RowResolver from the input operator to generate a input
  // ObjectInspector that can be used to initialize the UDTF. Then, the
  // resulting output object inspector can be used to make the RowResolver
  // for the UDTF operator
  RowResolver selectRR = opParseCtx.get(input).getRowResolver();
  ArrayList<ColumnInfo> inputCols = selectRR.getColumnInfos();
  // Create the object inspector for the input columns and initialize the UDTF
  ArrayList<String> colNames = new ArrayList<String>();
  ObjectInspector[] colOIs = new ObjectInspector[inputCols.size()];
  for (int i = 0; i < inputCols.size(); i++) {
    colNames.add(inputCols.get(i).getInternalName());
    colOIs[i] = inputCols.get(i).getObjectInspector();
  }
  StandardStructObjectInspector rowOI =
      ObjectInspectorFactory.getStandardStructObjectInspector(colNames, Arrays.asList(colOIs));
  StructObjectInspector outputOI = genericUDTF.initialize(rowOI);
  int numUdtfCols = outputOI.getAllStructFieldRefs().size();
  if (colAliases.isEmpty()) {
    // user did not specify alias names, infer names from outputOI
    for (StructField field : outputOI.getAllStructFieldRefs()) {
      colAliases.add(field.getFieldName());
    }
  }
  // Make sure that the number of column aliases in the AS clause matches
  // the number of columns output by the UDTF
  int numSuppliedAliases = colAliases.size();
  if (numUdtfCols != numSuppliedAliases) {
    throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH
        .getMsg("expected " + numUdtfCols + " aliases " + "but got "
            + numSuppliedAliases));
  }
  // Generate the output column info's / row resolver using internal names.
  ArrayList<ColumnInfo> udtfCols = new ArrayList<ColumnInfo>();
  Iterator<String> colAliasesIter = colAliases.iterator();
  for (StructField sf : outputOI.getAllStructFieldRefs()) {
    String colAlias = colAliasesIter.next();
    assert (colAlias != null);
    // Since the UDTF operator feeds into a LVJ operator that will rename
    // all the internal names, we can just use field name from the UDTF's OI
    // as the internal name
    ColumnInfo col = new ColumnInfo(sf.getFieldName(), TypeInfoUtils
        .getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()),
        outputTableAlias, false);
    udtfCols.add(col);
  }
  // Create the row resolver for this operator from the output columns
  RowResolver out_rwsch = new RowResolver();
  for (int i = 0; i < udtfCols.size(); i++) {
    out_rwsch.put(outputTableAlias, colAliases.get(i), udtfCols.get(i));
  }
  // Add the UDTFOperator to the operator DAG
  Operator<?> udtf = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new UDTFDesc(genericUDTF, outerLV), new RowSchema(out_rwsch.getColumnInfos()),
      input), out_rwsch);
  return udtf;
}
@SuppressWarnings("nls")
private Operator genLimitMapRedPlan(String dest, QB qb, Operator input,
    int limit, boolean extraMRStep) throws SemanticException {
  // Apply the limit on the map side first; a map-only job avoids the
  // sorting cost of a full map-reduce phase.
  Operator mapSideLimit = genLimitPlan(dest, qb, input, limit);

  // When the client did not request an extra map-reduce step, the
  // map-side limit is the final answer.
  if (!extraMRStep) {
    return mapSideLimit;
  }

  // Otherwise funnel all rows through a single reducer and apply the
  // limit once more to obtain a globally correct row count.
  Operator singleReducer = genReduceSinkPlan(dest, qb, mapSideLimit, 1);
  return genLimitPlan(dest, qb, singleReducer, limit);
}
private ArrayList<ExprNodeDesc> getPartitionColsFromBucketCols(String dest, QB qb, Table tab,
    TableDesc table_desc, Operator input, boolean convert)
    throws SemanticException {
  // Map each bucketing column name to its ordinal position in the table
  // schema; rows will be partitioned on those columns.
  List<String> bucketCols = tab.getBucketCols();
  List<FieldSchema> schema = tab.getCols();

  List<Integer> positions = new ArrayList<Integer>();
  for (String bucketCol : bucketCols) {
    for (int idx = 0; idx < schema.size(); idx++) {
      if (bucketCol.equals(schema.get(idx).getName())) {
        positions.add(idx);
        break;
      }
    }
  }
  return genConvertCol(dest, qb, tab, table_desc, input, positions, convert);
}
private ArrayList<ExprNodeDesc> genConvertCol(String dest, QB qb, Table tab,
    TableDesc table_desc, Operator input, List<Integer> posns, boolean convert)
    throws SemanticException {
  // Instantiate the destination table's deserializer to discover the column
  // types the table expects at each position.
  StructObjectInspector tableOI = null;
  try {
    Deserializer deserializer = table_desc.getDeserializerClass()
        .newInstance();
    SerDeUtils.initializeSerDe(deserializer, conf, table_desc.getProperties(), null);
    tableOI = (StructObjectInspector) deserializer.getObjectInspector();
  } catch (Exception e) {
    throw new SemanticException(e);
  }

  List<? extends StructField> tableFields = tableOI.getAllStructFieldRefs();
  ArrayList<ColumnInfo> rowFields = opParseCtx.get(input).getRowResolver()
      .getColumnInfos();

  // Build a column expression for each requested position, casting to the
  // table's type when conversion is requested and the types differ.
  ArrayList<ExprNodeDesc> result = new ArrayList<ExprNodeDesc>(posns.size());
  for (Integer posn : posns) {
    TypeInfo tableFieldTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(
        tableFields.get(posn).getFieldObjectInspector());
    TypeInfo rowFieldTypeInfo = rowFields.get(posn).getType();

    ExprNodeDesc column = new ExprNodeColumnDesc(rowFieldTypeInfo,
        rowFields.get(posn).getInternalName(), rowFields.get(posn).getTabAlias(),
        rowFields.get(posn).getIsVirtualCol());

    if (convert && !tableFieldTypeInfo.equals(rowFieldTypeInfo)) {
      // Only primitive targets can take a conversion cast.
      if (tableFieldTypeInfo.getCategory() != Category.PRIMITIVE) {
        column = null;
      } else {
        column = ParseUtils.createConversionCast(
            column, (PrimitiveTypeInfo)tableFieldTypeInfo);
      }
      if (column == null) {
        String reason = "Cannot convert column " + posn + " from "
            + rowFieldTypeInfo + " to " + tableFieldTypeInfo + ".";
        throw new SemanticException(ErrorMsg.TARGET_TABLE_COLUMN_MISMATCH
            .getMsg(qb.getParseInfo().getDestForClause(dest), reason));
      }
    }
    result.add(column);
  }
  return result;
}
/**
 * Returns the column expressions for the table's sort columns, optionally
 * cast to the table's declared types.
 *
 * Fix: removed the unused locals {@code inputRR} and {@code colInfo} — the
 * resolver lookup had no effect and the ColumnInfo was fetched but never used.
 *
 * @param dest destination clause name, used for error messages downstream
 * @param qb the query block being compiled
 * @param tab the destination table whose SORTED BY columns are resolved
 * @param table_desc descriptor used by genConvertCol for type conversion
 * @param input operator producing the rows
 * @param convert whether to insert conversion casts to the table's types
 * @return expressions for the sort columns, in SORTED BY order
 * @throws SemanticException if a required conversion is impossible
 */
private ArrayList<ExprNodeDesc> getSortCols(String dest, QB qb, Table tab, TableDesc table_desc,
    Operator input, boolean convert)
    throws SemanticException {
  List<Order> tabSortCols = tab.getSortCols();
  List<FieldSchema> tabCols = tab.getCols();

  // Locate each sort column's ordinal position in the table schema.
  List<Integer> posns = new ArrayList<Integer>();
  for (Order sortCol : tabSortCols) {
    int pos = 0;
    for (FieldSchema tabCol : tabCols) {
      if (sortCol.getCol().equals(tabCol.getName())) {
        posns.add(pos);
        break;
      }
      pos++;
    }
  }
  return genConvertCol(dest, qb, tab, table_desc, input, posns, convert);
}
/**
 * Returns the sort direction (ascending/descending codes) for each of the
 * table's SORTED BY columns, in declaration order.
 *
 * Fix: removed the unused local {@code inputRR} — the row resolver was
 * fetched but never referenced.
 *
 * @param dest destination clause name (unused; kept for signature stability)
 * @param qb the query block being compiled (unused; kept for signature stability)
 * @param tab the destination table whose sort orders are read
 * @param input operator producing the rows (unused; kept for signature stability)
 * @return the order code of each sort column that matches a table column
 */
private ArrayList<Integer> getSortOrders(String dest, QB qb, Table tab, Operator input)
    throws SemanticException {
  List<Order> tabSortCols = tab.getSortCols();
  List<FieldSchema> tabCols = tab.getCols();

  ArrayList<Integer> orders = new ArrayList<Integer>();
  for (Order sortCol : tabSortCols) {
    for (FieldSchema tabCol : tabCols) {
      if (sortCol.getCol().equals(tabCol.getName())) {
        orders.add(sortCol.getOrder());
        break;
      }
    }
  }
  return orders;
}
/**
 * Builds a ReduceSink followed by an Extract operator so rows are partitioned
 * and sorted as required by a bucketed/sorted destination table.
 *
 * @param tab destination table (used only for debug logging here)
 * @param input operator producing the rows
 * @param sortCols key expressions to sort by
 * @param sortOrders per-key order codes; translated to '+'/'-' below
 * @param partitionCols expressions rows are partitioned (bucketed) by
 * @param numReducers number of reducers, typically the bucket count
 * @return the Extract operator that re-exposes the value fields downstream
 */
@SuppressWarnings("nls")
private Operator genReduceSinkPlanForSortingBucketing(Table tab, Operator input,
    ArrayList<ExprNodeDesc> sortCols,
    List<Integer> sortOrders,
    ArrayList<ExprNodeDesc> partitionCols,
    int numReducers)
    throws SemanticException {
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  // For the generation of the values expression just get the inputs
  // signature and generate field expressions for those
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
  ArrayList<String> outputColumns = new ArrayList<String>();
  int i = 0;
  for (ColumnInfo colInfo : inputRR.getColumnInfos()) {
    String internalName = getColumnInternalName(i++);
    outputColumns.add(internalName);
    valueCols.add(new ExprNodeColumnDesc(colInfo.getType(), colInfo
        .getInternalName(), colInfo.getTabAlias(), colInfo
        .getIsVirtualCol()));
    colExprMap.put(internalName, valueCols
        .get(valueCols.size() - 1));
  }
  // Encode the sort directions as a '+'/'-' string, one char per sort key.
  StringBuilder order = new StringBuilder();
  for (int sortOrder : sortOrders) {
    order.append(sortOrder == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC ? '+' : '-');
  }
  Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
      .getReduceSinkDesc(sortCols, valueCols, outputColumns, false, -1,
          partitionCols, order.toString(), numReducers),
      new RowSchema(inputRR.getColumnInfos()), input), inputRR);
  interim.setColumnExprMap(colExprMap);
  // Track this RS so the optimizer knows it was added for enforce-bucketing/sorting.
  reduceSinkOperatorsAddedByEnforceBucketingSorting.add((ReduceSinkOperator) interim);

  // Add the extract operator to get the value fields
  RowResolver out_rwsch = new RowResolver();
  RowResolver interim_rwsch = inputRR;
  Integer pos = Integer.valueOf(0);
  for (ColumnInfo colInfo : interim_rwsch.getColumnInfos()) {
    String[] info = interim_rwsch.reverseLookup(colInfo.getInternalName());
    out_rwsch.put(info[0], info[1], new ColumnInfo(
        getColumnInternalName(pos), colInfo.getType(), info[0],
        colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol()));
    pos = Integer.valueOf(pos.intValue() + 1);
  }
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new ExtractDesc(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
          Utilities.ReduceField.VALUE.toString(), "", false)), new RowSchema(
          out_rwsch.getColumnInfos()), interim), out_rwsch);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created ReduceSink Plan for table: " + tab.getTableName() +
        " row schema: " + out_rwsch.toString());
  }
  return output;
}
/**
 * Builds a ReduceSink (and a trailing Select) implementing CLUSTER BY /
 * DISTRIBUTE BY / SORT BY / ORDER BY for the given destination clause.
 * Columns that backtrack to a sort key are mapped into KEY.reducesinkkeyN;
 * the rest are carried as VALUE columns. The index[] array records, per input
 * column, a non-negative key index or (-valueIndex - 1) for value columns.
 *
 * @param dest destination clause name
 * @param qb the query block being compiled
 * @param input operator producing the rows
 * @param numReducers number of reducers; must be 1 for ORDER BY
 * @return the Select operator on top of the ReduceSink
 * @throws SemanticException e.g. strict-mode ORDER BY without LIMIT
 */
@SuppressWarnings("nls")
private Operator genReduceSinkPlan(String dest, QB qb, Operator<?> input,
    int numReducers) throws SemanticException {
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  // First generate the expression for the partition and sort keys
  // The cluster by clause / distribute by clause has the aliases for
  // partition function
  ASTNode partitionExprs = qb.getParseInfo().getClusterByForClause(dest);
  if (partitionExprs == null) {
    partitionExprs = qb.getParseInfo().getDistributeByForClause(dest);
  }
  ArrayList<ExprNodeDesc> partitionCols = new ArrayList<ExprNodeDesc>();
  if (partitionExprs != null) {
    int ccount = partitionExprs.getChildCount();
    for (int i = 0; i < ccount; ++i) {
      ASTNode cl = (ASTNode) partitionExprs.getChild(i);
      partitionCols.add(genExprNodeDesc(cl, inputRR));
    }
  }
  // Sort keys come from CLUSTER BY, else SORT BY, else ORDER BY.
  ASTNode sortExprs = qb.getParseInfo().getClusterByForClause(dest);
  if (sortExprs == null) {
    sortExprs = qb.getParseInfo().getSortByForClause(dest);
  }
  if (sortExprs == null) {
    sortExprs = qb.getParseInfo().getOrderByForClause(dest);
    if (sortExprs != null) {
      assert numReducers == 1;
      // in strict mode, in the presence of order by, limit must be specified
      Integer limit = qb.getParseInfo().getDestLimit(dest);
      if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
          "strict")
          && limit == null) {
        throw new SemanticException(generateErrorMessage(sortExprs,
            ErrorMsg.NO_LIMIT_WITH_ORDERBY.getMsg()));
      }
    }
  }
  // Dummy parent lets ExprNodeDescUtils.backtrack resolve expressions
  // through 'input' without mutating the real operator tree.
  Operator dummy = Operator.createDummy();
  dummy.setParentOperators(Arrays.asList(input));
  ArrayList<ExprNodeDesc> sortCols = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> sortColsBack = new ArrayList<ExprNodeDesc>();
  StringBuilder order = new StringBuilder();
  if (sortExprs != null) {
    int ccount = sortExprs.getChildCount();
    for (int i = 0; i < ccount; ++i) {
      ASTNode cl = (ASTNode) sortExprs.getChild(i);
      if (cl.getType() == HiveParser.TOK_TABSORTCOLNAMEASC) {
        // SortBy ASC
        order.append("+");
        cl = (ASTNode) cl.getChild(0);
      } else if (cl.getType() == HiveParser.TOK_TABSORTCOLNAMEDESC) {
        // SortBy DESC
        order.append("-");
        cl = (ASTNode) cl.getChild(0);
      } else {
        // ClusterBy
        order.append("+");
      }
      ExprNodeDesc exprNode = genExprNodeDesc(cl, inputRR);
      sortCols.add(exprNode);
      sortColsBack.add(ExprNodeDescUtils.backtrack(exprNode, dummy, input));
    }
  }
  // For the generation of the values expression just get the inputs
  // signature and generate field expressions for those
  RowResolver rsRR = new RowResolver();
  ArrayList<String> outputColumns = new ArrayList<String>();
  ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> valueColsBack = new ArrayList<ExprNodeDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  ArrayList<ColumnInfo> columnInfos = inputRR.getColumnInfos();
  // index[i] >= 0: input column i is sort key #index[i];
  // index[i] < 0: it is value column #(-index[i] - 1).
  int[] index = new int[columnInfos.size()];
  for (int i = 0; i < index.length; i++) {
    ColumnInfo colInfo = columnInfos.get(i);
    String[] nm = inputRR.reverseLookup(colInfo.getInternalName());
    String[] nm2 = inputRR.getAlternateMappings(colInfo.getInternalName());
    ExprNodeColumnDesc value = new ExprNodeColumnDesc(colInfo.getType(),
        colInfo.getInternalName(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
    // backtrack can be null when input is script operator
    ExprNodeDesc valueBack = ExprNodeDescUtils.backtrack(value, dummy, input);
    int kindex = valueBack == null ? -1 : ExprNodeDescUtils.indexOf(valueBack, sortColsBack);
    if (kindex >= 0) {
      // Column is one of the sort keys: map it to KEY.reducesinkkeyN.
      index[i] = kindex;
      ColumnInfo newColInfo = new ColumnInfo(colInfo);
      newColInfo.setInternalName(Utilities.ReduceField.KEY + ".reducesinkkey" + kindex);
      newColInfo.setTabAlias(nm[0]);
      rsRR.addMappingOnly(nm[0], nm[1], newColInfo);
      if (nm2 != null) {
        rsRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
      }
      continue;
    }
    int vindex = valueBack == null ? -1 : ExprNodeDescUtils.indexOf(valueBack, valueColsBack);
    if (vindex >= 0) {
      // Column duplicates an already-registered value expression.
      index[i] = -vindex - 1;
      continue;
    }
    // New value column: register it and map it to VALUE.<name>.
    index[i] = -valueCols.size() - 1;
    String outputColName = getColumnInternalName(valueCols.size());
    valueCols.add(value);
    valueColsBack.add(valueBack);
    ColumnInfo newColInfo = new ColumnInfo(colInfo);
    newColInfo.setInternalName(Utilities.ReduceField.VALUE + "." + outputColName);
    newColInfo.setTabAlias(nm[0]);
    rsRR.put(nm[0], nm[1], newColInfo);
    if (nm2 != null) {
      rsRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
    }
    outputColumns.add(outputColName);
  }
  dummy.setParentOperators(null);
  ReduceSinkDesc rsdesc = PlanUtils.getReduceSinkDesc(sortCols, valueCols, outputColumns,
      false, -1, partitionCols, order.toString(), numReducers);
  Operator interim = putOpInsertMap(OperatorFactory.getAndMakeChild(rsdesc,
      new RowSchema(rsRR.getColumnInfos()), input), rsRR);
  List<String> keyColNames = rsdesc.getOutputKeyColumnNames();
  for (int i = 0 ; i < keyColNames.size(); i++) {
    colExprMap.put(Utilities.ReduceField.KEY + "." + keyColNames.get(i), sortCols.get(i));
  }
  List<String> valueColNames = rsdesc.getOutputValueColumnNames();
  for (int i = 0 ; i < valueColNames.size(); i++) {
    colExprMap.put(Utilities.ReduceField.VALUE + "." + valueColNames.get(i), valueCols.get(i));
  }
  interim.setColumnExprMap(colExprMap);
  // Trailing Select restores the original column layout on top of the RS,
  // reading each column back out of KEY.* / VALUE.* via index[].
  RowResolver selectRR = new RowResolver();
  ArrayList<ExprNodeDesc> selCols = new ArrayList<ExprNodeDesc>();
  ArrayList<String> selOutputCols = new ArrayList<String>();
  Map<String, ExprNodeDesc> selColExprMap = new HashMap<String, ExprNodeDesc>();
  for (int i = 0; i < index.length; i++) {
    ColumnInfo prev = columnInfos.get(i);
    String[] nm = inputRR.reverseLookup(prev.getInternalName());
    String[] nm2 = inputRR.getAlternateMappings(prev.getInternalName());
    ColumnInfo info = new ColumnInfo(prev);
    String field;
    if (index[i] >= 0) {
      field = Utilities.ReduceField.KEY + "." + keyColNames.get(index[i]);
    } else {
      field = Utilities.ReduceField.VALUE + "." + valueColNames.get(-index[i] - 1);
    }
    String internalName = getColumnInternalName(i);
    ExprNodeColumnDesc desc = new ExprNodeColumnDesc(info.getType(),
        field, info.getTabAlias(), info.getIsVirtualCol());
    selCols.add(desc);
    info.setInternalName(internalName);
    selectRR.put(nm[0], nm[1], info);
    if (nm2 != null) {
      selectRR.addMappingOnly(nm2[0], nm2[1], info);
    }
    selOutputCols.add(internalName);
    selColExprMap.put(internalName, desc);
  }
  SelectDesc select = new SelectDesc(selCols, selOutputCols);
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(select,
      new RowSchema(selectRR.getColumnInfos()), interim), selectRR);
  output.setColumnExprMap(selColExprMap);
  return output;
}
/**
 * Creates the JoinOperator over the given ReduceSink children, wiring up the
 * per-tag value/filter expressions and the output row resolver.
 *
 * Fixes: removed the dead null re-check on {@code input} (after the ternary,
 * {@code input} can only be null when {@code left} is also null, so the
 * re-assignment was a no-op) and the unused local {@code outputPos}.
 *
 * @param join join tree describing conditions, filters and null-safety
 * @param left operator used for positions with no explicit right input
 * @param right per-position ReduceSink inputs (null entries fall back to left)
 * @param omitOpts positions whose input should not contribute output columns
 *        (e.g. the right side of a left-semi join); may be null
 * @return the JoinOperator registered in the op-to-parse-context map
 * @throws SemanticException if an input RS does not have exactly one parent
 */
private Operator genJoinOperatorChildren(QBJoinTree join, Operator left,
    Operator[] right, HashSet<Integer> omitOpts) throws SemanticException {
  RowResolver outputRR = new RowResolver();
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  // all children are base classes
  Operator<?>[] rightOps = new Operator[right.length];
  Map<String, Byte> reversedExprs = new HashMap<String, Byte>();
  HashMap<Byte, List<ExprNodeDesc>> exprMap = new HashMap<Byte, List<ExprNodeDesc>>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  HashMap<Integer, Set<String>> posToAliasMap = new HashMap<Integer, Set<String>>();
  HashMap<Byte, List<ExprNodeDesc>> filterMap =
      new HashMap<Byte, List<ExprNodeDesc>>();
  for (int pos = 0; pos < right.length; ++pos) {
    // Fall back to the left-side operator when this position has no input.
    Operator<?> input = right[pos] == null ? left : right[pos];
    ReduceSinkOperator rs = (ReduceSinkOperator) input;
    if (rs.getNumParent() != 1) {
      throw new SemanticException("RS should have single parent");
    }
    Operator<?> parent = rs.getParentOperators().get(0);
    ReduceSinkDesc rsDesc = (ReduceSinkDesc) (input.getConf());
    int[] index = rs.getValueIndex();
    ArrayList<ExprNodeDesc> valueDesc = new ArrayList<ExprNodeDesc>();
    ArrayList<ExprNodeDesc> filterDesc = new ArrayList<ExprNodeDesc>();
    Byte tag = (byte) rsDesc.getTag();
    // check whether this input operator produces output
    if (omitOpts != null && omitOpts.contains(pos)) {
      exprMap.put(tag, valueDesc);
      filterMap.put(tag, filterDesc);
      rightOps[pos] = input;
      continue;
    }
    List<String> keyColNames = rsDesc.getOutputKeyColumnNames();
    List<String> valColNames = rsDesc.getOutputValueColumnNames();
    // prepare output descriptors for the input opt
    RowResolver inputRR = opParseCtx.get(input).getRowResolver();
    RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
    posToAliasMap.put(pos, new HashSet<String>(inputRR.getTableNames()));
    List<ColumnInfo> columns = parentRR.getColumnInfos();
    for (int i = 0; i < index.length; i++) {
      ColumnInfo prev = columns.get(i);
      String[] nm = parentRR.reverseLookup(prev.getInternalName());
      String[] nm2 = parentRR.getAlternateMappings(prev.getInternalName());
      // Skip columns already contributed by an earlier input.
      if (outputRR.get(nm[0], nm[1]) != null) {
        continue;
      }
      ColumnInfo info = new ColumnInfo(prev);
      // index[i] >= 0 means the column rides in the RS key; otherwise it is
      // value column #(-index[i] - 1).
      String field;
      if (index[i] >= 0) {
        field = Utilities.ReduceField.KEY + "." + keyColNames.get(index[i]);
      } else {
        field = Utilities.ReduceField.VALUE + "." + valColNames.get(-index[i] - 1);
      }
      String internalName = getColumnInternalName(outputColumnNames.size());
      ExprNodeColumnDesc desc = new ExprNodeColumnDesc(info.getType(),
          field, info.getTabAlias(), info.getIsVirtualCol());
      info.setInternalName(internalName);
      colExprMap.put(internalName, desc);
      outputRR.put(nm[0], nm[1], info);
      if (nm2 != null) {
        outputRR.addMappingOnly(nm2[0], nm2[1], info);
      }
      valueDesc.add(desc);
      outputColumnNames.add(internalName);
      reversedExprs.put(internalName, tag);
    }
    // Residual (non-equi) filters attached to this join input.
    for (ASTNode cond : join.getFilters().get(tag)) {
      filterDesc.add(genExprNodeDesc(cond, inputRR));
    }
    exprMap.put(tag, valueDesc);
    filterMap.put(tag, filterDesc);
    rightOps[pos] = input;
  }
  JoinCondDesc[] joinCondns = new JoinCondDesc[join.getJoinCond().length];
  for (int i = 0; i < join.getJoinCond().length; i++) {
    JoinCond condn = join.getJoinCond()[i];
    joinCondns[i] = new JoinCondDesc(condn);
  }
  JoinDesc desc = new JoinDesc(exprMap, outputColumnNames,
      join.getNoOuterJoin(), joinCondns, filterMap);
  desc.setReversedExprs(reversedExprs);
  desc.setFilterMap(join.getFilterMap());
  JoinOperator joinOp = (JoinOperator) OperatorFactory.getAndMakeChild(desc,
      new RowSchema(outputRR.getColumnInfos()), rightOps);
  joinOp.setColumnExprMap(colExprMap);
  joinOp.setPosToAliasMap(posToAliasMap);
  if (join.getNullSafes() != null) {
    boolean[] nullsafes = new boolean[join.getNullSafes().size()];
    for (int i = 0; i < nullsafes.length; i++) {
      nullsafes[i] = join.getNullSafes().get(i);
    }
    desc.setNullSafes(nullsafes);
  }
  queryProperties.incrementJoinCount(joinOp.getConf().getNoOuterJoin());
  return putOpInsertMap(joinOp, outputRR);
}
private ExprNodeDesc[][] genJoinKeys(QBJoinTree joinTree, Operator[] inputs)
    throws SemanticException {
  // Translate every join-key AST of every input branch into an expression.
  ExprNodeDesc[][] keys = new ExprNodeDesc[inputs.length][];
  for (int src = 0; src < inputs.length; src++) {
    RowResolver resolver = opParseCtx.get(inputs[src]).getRowResolver();
    List<ASTNode> keyAsts = joinTree.getExpressions().get(src);
    ExprNodeDesc[] branchKeys = new ExprNodeDesc[keyAsts.size()];
    for (int k = 0; k < branchKeys.length; k++) {
      branchKeys[k] = genExprNodeDesc(keyAsts.get(k), resolver);
    }
    keys[src] = branchKeys;
  }
  // Type checking and implicit type conversion for join keys
  return genJoinOperatorTypeCheck(keys);
}
/**
 * Creates the ReduceSink child feeding one input of a join: join keys become
 * the RS key, remaining columns become RS values, and a value-index array is
 * recorded so the JoinOperator can locate each column in KEY.* / VALUE.*.
 *
 * Bug fix: the value-deduplication branch tested {@code kindex >= 0} instead
 * of {@code vindex >= 0}. At that point {@code kindex < 0} is guaranteed (the
 * key branch already {@code continue}d), so the branch was unreachable and a
 * column matching an existing value expression was re-added as a duplicate
 * value column. The sibling {@code genReduceSinkPlan} uses the correct
 * {@code vindex >= 0} test.
 *
 * @param qb the query block being compiled
 * @param joinKeys join-key expressions for this input
 * @param child operator producing this input's rows
 * @param srcs source aliases feeding this input
 * @param tag join tag identifying this input in the RS
 * @return the configured ReduceSinkOperator
 * @throws SemanticException e.g. cartesian product in strict mode
 */
@SuppressWarnings("nls")
private Operator genJoinReduceSinkChild(QB qb, ExprNodeDesc[] joinKeys,
    Operator<?> child, String[] srcs, int tag) throws SemanticException {
  Operator dummy = Operator.createDummy(); // dummy for backtracking
  dummy.setParentOperators(Arrays.asList(child));
  RowResolver inputRR = opParseCtx.get(child).getRowResolver();
  RowResolver outputRR = new RowResolver();
  ArrayList<String> outputColumns = new ArrayList<String>();
  ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> reduceKeysBack = new ArrayList<ExprNodeDesc>();
  // Compute join keys and store in reduceKeys
  for (ExprNodeDesc joinKey : joinKeys) {
    reduceKeys.add(joinKey);
    reduceKeysBack.add(ExprNodeDescUtils.backtrack(joinKey, dummy, child));
  }
  // Walk over the input row resolver and copy in the output
  ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> reduceValuesBack = new ArrayList<ExprNodeDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  List<ColumnInfo> columns = inputRR.getColumnInfos();
  // index[i] >= 0: column i is join key #index[i];
  // index[i] < 0: it is value column #(-index[i] - 1).
  int[] index = new int[columns.size()];
  for (int i = 0; i < columns.size(); i++) {
    ColumnInfo colInfo = columns.get(i);
    String[] nm = inputRR.reverseLookup(colInfo.getInternalName());
    String[] nm2 = inputRR.getAlternateMappings(colInfo.getInternalName());
    ExprNodeDesc expr = new ExprNodeColumnDesc(colInfo.getType(),
        colInfo.getInternalName(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
    // backtrack can be null when input is script operator
    ExprNodeDesc exprBack = ExprNodeDescUtils.backtrack(expr, dummy, child);
    int kindex = exprBack == null ? -1 : ExprNodeDescUtils.indexOf(exprBack, reduceKeysBack);
    if (kindex >= 0) {
      // Column is one of the join keys: expose it as KEY.reducesinkkeyN.
      ColumnInfo newColInfo = new ColumnInfo(colInfo);
      newColInfo.setInternalName(Utilities.ReduceField.KEY + ".reducesinkkey" + kindex);
      newColInfo.setTabAlias(nm[0]);
      outputRR.addMappingOnly(nm[0], nm[1], newColInfo);
      if (nm2 != null) {
        outputRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
      }
      index[i] = kindex;
      continue;
    }
    int vindex = exprBack == null ? -1 : ExprNodeDescUtils.indexOf(exprBack, reduceValuesBack);
    if (vindex >= 0) {
      // Column duplicates an already-registered value expression; reuse it.
      index[i] = -vindex - 1;
      continue;
    }
    // New value column: register it and expose it as VALUE.<name>.
    index[i] = -reduceValues.size() - 1;
    String outputColName = getColumnInternalName(reduceValues.size());
    reduceValues.add(expr);
    reduceValuesBack.add(exprBack);
    ColumnInfo newColInfo = new ColumnInfo(colInfo);
    newColInfo.setInternalName(Utilities.ReduceField.VALUE + "." + outputColName);
    newColInfo.setTabAlias(nm[0]);
    outputRR.put(nm[0], nm[1], newColInfo);
    if (nm2 != null) {
      outputRR.addMappingOnly(nm2[0], nm2[1], newColInfo);
    }
    outputColumns.add(outputColName);
  }
  dummy.setParentOperators(null);
  int numReds = -1;
  // Use only 1 reducer in case of cartesian product
  if (reduceKeys.size() == 0) {
    numReds = 1;
    // Cartesian product is not supported in strict mode
    if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
        "strict")) {
      throw new SemanticException(ErrorMsg.NO_CARTESIAN_PRODUCT.getMsg());
    }
  }
  ReduceSinkDesc rsDesc = PlanUtils.getReduceSinkDesc(reduceKeys,
      reduceValues, outputColumns, false, tag,
      reduceKeys.size(), numReds);
  ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
      OperatorFactory.getAndMakeChild(rsDesc, new RowSchema(outputRR
          .getColumnInfos()), child), outputRR);
  List<String> keyColNames = rsDesc.getOutputKeyColumnNames();
  for (int i = 0 ; i < keyColNames.size(); i++) {
    colExprMap.put(Utilities.ReduceField.KEY + "." + keyColNames.get(i), reduceKeys.get(i));
  }
  List<String> valColNames = rsDesc.getOutputValueColumnNames();
  for (int i = 0 ; i < valColNames.size(); i++) {
    colExprMap.put(Utilities.ReduceField.VALUE + "." + valColNames.get(i), reduceValues.get(i));
  }
  rsOp.setValueIndex(index);
  rsOp.setColumnExprMap(colExprMap);
  rsOp.setInputAliases(srcs);
  return rsOp;
}
/**
 * Recursively generates the Join operator for the given join tree.
 * The left side is either the supplied {@code joiningOp} (when it is already a
 * JoinOperator) or the operator produced by recursing into the child join
 * tree. Each base source gets a ReduceSink on its join keys; for left-semi
 * joins an extra select + map-side group-by is inserted first, and post-join
 * filters are evaluated above the join.
 *
 * @param qb the query block being compiled
 * @param joinTree the join tree to generate operators for
 * @param map alias (lower-cased) to source operator mapping
 * @param joiningOp optional pre-built operator for the left side
 * @return the top operator of the generated join (plus post-join filters)
 * @throws SemanticException on plan-generation failure
 */
private Operator genJoinOperator(QB qb, QBJoinTree joinTree,
    Map<String, Operator> map,
    Operator joiningOp) throws SemanticException {
  QBJoinTree leftChild = joinTree.getJoinSrc();
  // Reuse joiningOp as the left input only when it is itself a join;
  // otherwise build the left side by recursing into the child join tree.
  Operator joinSrcOp = joiningOp instanceof JoinOperator ? joiningOp : null;
  if (joinSrcOp == null && leftChild != null) {
    joinSrcOp = genJoinOperator(qb, leftChild, map, null);
  }
  if ( joinSrcOp != null ) {
    // Push filters belonging to the (merged) left side on top of it.
    ArrayList<ASTNode> filter = joinTree.getFiltersForPushing().get(0);
    for (ASTNode cond : filter) {
      joinSrcOp = genFilterPlan(qb, cond, joinSrcOp);
    }
  }
  String[] baseSrc = joinTree.getBaseSrc();
  Operator[] srcOps = new Operator[baseSrc.length];
  HashSet<Integer> omitOpts = null; // set of input to the join that should be
  // omitted by the output
  int pos = 0;
  for (String src : baseSrc) {
    if (src != null) {
      Operator srcOp = map.get(src.toLowerCase());
      // for left-semi join, generate an additional selection & group-by
      // operator before ReduceSink
      ArrayList<ASTNode> fields = joinTree.getRHSSemijoinColumns(src);
      if (fields != null) {
        // the RHS table columns should be not be output from the join
        if (omitOpts == null) {
          omitOpts = new HashSet<Integer>();
        }
        omitOpts.add(pos);
        // generate a selection operator for group-by keys only
        srcOp = insertSelectForSemijoin(fields, srcOp);
        // generate a groupby operator (HASH mode) for a map-side partial
        // aggregation for semijoin
        srcOps[pos++] = genMapGroupByForSemijoin(qb, fields, srcOp,
            GroupByDesc.Mode.HASH);
      } else {
        srcOps[pos++] = srcOp;
      }
    } else {
      // A null alias marks the position fed by the left child join tree.
      assert pos == 0;
      srcOps[pos++] = joinSrcOp;
    }
  }
  ExprNodeDesc[][] joinKeys = genJoinKeys(joinTree, srcOps);
  for (int i = 0; i < srcOps.length; i++) {
    // generate a ReduceSink operator for the join
    String[] srcs = baseSrc[i] != null ? new String[] {baseSrc[i]} : joinTree.getLeftAliases();
    srcOps[i] = genNotNullFilterForJoinSourcePlan(qb, srcOps[i], joinTree, joinKeys[i]);
    srcOps[i] = genJoinReduceSinkChild(qb, joinKeys[i], srcOps[i], srcs, joinTree.getNextTag());
  }
  JoinOperator joinOp = (JoinOperator) genJoinOperatorChildren(joinTree,
      joinSrcOp, srcOps, omitOpts);
  joinContext.put(joinOp, joinTree);
  // Post-join filters cannot be pushed below the join; apply them on top.
  Operator op = joinOp;
  for(ASTNode condn : joinTree.getPostJoinFilters() ) {
    op = genFilterPlan(qb, condn, op);
  }
  return op;
}
/**
 * Constructs a selection operator for a semijoin that projects only the
 * group-by key fields and drops every other column.
 *
 * @param fields
 *          list of fields that need to be output
 * @param input
 *          input operator
 * @return the selection operator
 * @throws SemanticException if a field expression cannot be resolved
 */
private Operator insertSelectForSemijoin(ArrayList<ASTNode> fields,
    Operator input) throws SemanticException {
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
  ArrayList<String> columnNames = new ArrayList<String>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // Resolve each requested field against the input row resolver and record
  // both the projected expression and its output-column mapping.
  for (ASTNode field : fields) {
    ExprNodeColumnDesc col =
        (ExprNodeColumnDesc) genExprNodeDesc(field, inputRR);
    colList.add(col);
    columnNames.add(col.getColumn());
    colExprMap.put(col.getColumn(), col);
  }
  // Build the select operator on top of the input and register it.
  Operator selOp = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new SelectDesc(colList, columnNames, false),
      new RowSchema(inputRR.getColumnInfos()), input), inputRR);
  selOp.setColumnExprMap(colExprMap);
  return selOp;
}
/**
 * Generates a map-side partial group-by (normally HASH mode) over the
 * semijoin key columns, deduplicating RHS rows before the ReduceSink.
 *
 * @param fields the join-key ASTNodes of the form "tab.col"
 * @param inputOperatorInfo the operator feeding the group-by
 * @param mode group-by mode to use (HASH for map-side partial aggregation)
 * @return the generated group-by operator
 * @throws SemanticException if a key expression cannot be resolved
 */
private Operator genMapGroupByForSemijoin(QB qb, ArrayList<ASTNode> fields,
    Operator inputOperatorInfo, GroupByDesc.Mode mode)
    throws SemanticException {
  RowResolver groupByInputRowResolver = opParseCtx.get(inputOperatorInfo)
      .getRowResolver();
  RowResolver groupByOutputRowResolver = new RowResolver();
  ArrayList<ExprNodeDesc> groupByKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<String> outputColumnNames = new ArrayList<String>();
  // No aggregation functions: this group-by only deduplicates keys.
  ArrayList<AggregationDesc> aggregations = new ArrayList<AggregationDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // NOTE(review): result unused — looks like a leftover call; verify before removing.
  qb.getParseInfo();
  // join keys should only be columns but not be expressions
  groupByOutputRowResolver.setIsExprResolver(true);
  for (int i = 0; i < fields.size(); ++i) {
    // get the group by keys to ColumnInfo
    ASTNode colName = fields.get(i);
    ExprNodeDesc grpByExprNode = genExprNodeDesc(colName,
        groupByInputRowResolver);
    groupByKeys.add(grpByExprNode);
    // generate output column names
    String field = getColumnInternalName(i);
    outputColumnNames.add(field);
    ColumnInfo colInfo2 = new ColumnInfo(field, grpByExprNode.getTypeInfo(),
        "", false);
    groupByOutputRowResolver.putExpression(colName, colInfo2);
    // establish mapping from the output column to the input column
    colExprMap.put(field, grpByExprNode);
  }
  // Generate group-by operator with memory limits from the configuration.
  float groupByMemoryUsage = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRHASHMEMORY);
  float memoryThreshold = HiveConf
      .getFloatVar(conf, HiveConf.ConfVars.HIVEMAPAGGRMEMORYTHRESHOLD);
  Operator op = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new GroupByDesc(mode, outputColumnNames, groupByKeys, aggregations,
      false, groupByMemoryUsage, memoryThreshold, null, false, 0, false),
      new RowSchema(groupByOutputRowResolver.getColumnInfos()),
      inputOperatorInfo), groupByOutputRowResolver);
  op.setColumnExprMap(colExprMap);
  return op;
}
/**
 * Checks the join keys position by position across all join inputs and wraps
 * keys in implicit conversion casts so that every input's k-th key shares a
 * common comparison type.
 *
 * @param keys keys[i] holds the key expression list of the i-th join input
 * @return the key matrix with conversion casts inserted where required
 * @throws SemanticException when two key types have no common comparison type
 */
private ExprNodeDesc[][] genJoinOperatorTypeCheck(ExprNodeDesc[][] keys)
    throws SemanticException {
  // Every input is expected to contribute the same number of key columns.
  int keyLength = 0;
  for (int src = 0; src < keys.length; src++) {
    if (src == 0) {
      keyLength = keys[src].length;
    } else {
      assert keyLength == keys[src].length;
    }
  }
  // Walk each key position and apply the implicit type-conversion hierarchy.
  for (int col = 0; col < keyLength; col++) {
    // Find the common class for type conversion across all inputs.
    TypeInfo common = keys[0][col].getTypeInfo();
    for (int src = 1; src < keys.length; src++) {
      TypeInfo lhs = common;
      TypeInfo rhs = keys[src][col].getTypeInfo();
      common = FunctionRegistry.getCommonClassForComparison(lhs, rhs);
      if (common == null) {
        throw new SemanticException(
            "Cannot do equality join on different types: " + lhs.getTypeName()
            + " and " + rhs.getTypeName());
      }
    }
    // Insert a conversion cast wherever the key type differs from the common one.
    for (int src = 0; src < keys.length; src++) {
      if (TypeInfoUtils.isConversionRequiredForComparison(
          keys[src][col].getTypeInfo(), common)) {
        keys[src][col] = ParseUtils.createConversionCast(
            keys[src][col], (PrimitiveTypeInfo) common);
      }
    }
  }
  return keys;
}
/**
 * Generates the join plan for the query block's join tree.
 *
 * @param qb the query block
 * @param map alias to source-operator mapping
 * @return the top operator of the generated join plan
 * @throws SemanticException on plan-generation failure
 */
private Operator genJoinPlan(QB qb, Map<String, Operator> map)
    throws SemanticException {
  return genJoinOperator(qb, qb.getQbJoinTree(), map, null);
}
/**
 * Extract the filters from the join condition and push them on top of the
 * source operators. This overload traverses the join tree recursively.
 */
private void pushJoinFilters(QB qb, QBJoinTree joinTree,
    Map<String, Operator> map) throws SemanticException {
  pushJoinFilters(qb, joinTree, map, true);
}
/**
 * Extracts the filters from the join condition and pushes them on top of the
 * corresponding source operators, optionally descending into child join
 * trees first.
 *
 * @param recursively whether to process nested join sources as well
 */
private void pushJoinFilters(QB qb, QBJoinTree joinTree,
    Map<String, Operator> map,
    boolean recursively) throws SemanticException {
  if (recursively && joinTree.getJoinSrc() != null) {
    pushJoinFilters(qb, joinTree.getJoinSrc(), map);
  }
  ArrayList<ArrayList<ASTNode>> filters = joinTree.getFiltersForPushing();
  String[] sources = joinTree.getBaseSrc();
  for (int pos = 0; pos < sources.length; pos++) {
    String src = sources[pos];
    if (src == null) {
      continue;
    }
    // Stack this source's pushed filters on top of its operator.
    Operator srcOp = map.get(src);
    for (ASTNode cond : filters.get(pos)) {
      srcOp = genFilterPlan(qb, cond, srcOp);
    }
    map.put(src, srcOp);
  }
}
/**
 * Scans the query hints for MAPJOIN and returns the lower-cased aliases of
 * the tables the user asked to map-join, without duplicates. When the hint
 * is configured to be ignored (or the engine is tez), the removal is
 * recorded on queryProperties instead.
 */
private List<String> getMapSideJoinTables(QB qb) {
  List<String> cols = new ArrayList<String>();
  ASTNode hints = qb.getParseInfo().getHints();
  for (int pos = 0; pos < hints.getChildCount(); pos++) {
    ASTNode hint = (ASTNode) hints.getChild(pos);
    if (((ASTNode) hint.getChild(0)).getToken().getType() != HiveParser.TOK_MAPJOIN) {
      continue;
    }
    boolean hintHonored =
        !conf.getBoolVar(HiveConf.ConfVars.HIVEIGNOREMAPJOINHINT)
        && !conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez");
    if (!hintHonored) {
      // The user asked for a mapjoin but configuration discards the hint.
      queryProperties.setMapJoinRemoved(true);
      continue;
    }
    ASTNode hintTblNames = (ASTNode) hint.getChild(1);
    for (int tblPos = 0; tblPos < hintTblNames.getChildCount(); tblPos++) {
      String tblName =
          ((ASTNode) hintTblNames.getChild(tblPos)).getText().toLowerCase();
      if (!cols.contains(tblName)) {
        cols.add(tblName);
      }
    }
  }
  return cols;
}
/**
 * The join alias is modified before being inserted, for consumption by
 * sort-merge join queries. If the join is part of a sub-query the alias is
 * modified to include the sub-query alias.
 */
private String getModifiedAlias(QB qb, String alias) {
  return QB.getAppendedAliasFromId(qb.getId(), alias);
}
/**
 * Builds the QBJoinTree for a UNIQUEJOIN clause. All tables are collected in
 * baseSrc; the first becomes the left alias and all others right aliases.
 * Each TOK_EXPLIST child supplies the join-key expressions for the preceding
 * table, and KW_PRESERVE marks the next table reference as preserved.
 *
 * @throws SemanticException on a key-count mismatch between tables, on
 *         sub-queries (unsupported in UNIQUEJOIN), or an unexpected
 *         parse-tree shape
 */
private QBJoinTree genUniqueJoinTree(QB qb, ASTNode joinParseTree,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBJoinTree joinTree = new QBJoinTree();
  // UNIQUEJOIN is planned as an outer join.
  joinTree.setNoOuterJoin(false);
  joinTree.setExpressions(new ArrayList<ArrayList<ASTNode>>());
  joinTree.setFilters(new ArrayList<ArrayList<ASTNode>>());
  joinTree.setFiltersForPushing(new ArrayList<ArrayList<ASTNode>>());
  // Create joinTree structures to fill them up later
  ArrayList<String> rightAliases = new ArrayList<String>();
  ArrayList<String> leftAliases = new ArrayList<String>();
  ArrayList<String> baseSrc = new ArrayList<String>();
  ArrayList<Boolean> preserved = new ArrayList<Boolean>();
  boolean lastPreserved = false;
  int cols = -1; // expected key count, fixed by the first non-empty EXPLIST
  for (int i = 0; i < joinParseTree.getChildCount(); i++) {
    ASTNode child = (ASTNode) joinParseTree.getChild(i);
    switch (child.getToken().getType()) {
    case HiveParser.TOK_TABREF:
      // Handle a table - populate aliases appropriately:
      // leftAliases should contain the first table, rightAliases should
      // contain all other tables and baseSrc should contain all tables
      String tableName = getUnescapedUnqualifiedTableName((ASTNode) child.getChild(0));
      String alias = child.getChildCount() == 1 ? tableName
          : unescapeIdentifier(child.getChild(child.getChildCount() - 1)
          .getText().toLowerCase());
      if (i == 0) {
        leftAliases.add(alias);
        joinTree.setLeftAlias(alias);
      } else {
        rightAliases.add(alias);
      }
      joinTree.getAliasToOpInfo().put(
          getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
      joinTree.setId(qb.getId());
      baseSrc.add(alias);
      // Record and reset the PRESERVE flag seen before this table.
      preserved.add(lastPreserved);
      lastPreserved = false;
      break;
    case HiveParser.TOK_EXPLIST:
      if (cols == -1 && child.getChildCount() != 0) {
        cols = child.getChildCount();
      } else if (child.getChildCount() != cols) {
        throw new SemanticException("Tables with different or invalid "
            + "number of keys in UNIQUEJOIN");
      }
      ArrayList<ASTNode> expressions = new ArrayList<ASTNode>();
      ArrayList<ASTNode> filt = new ArrayList<ASTNode>();
      ArrayList<ASTNode> filters = new ArrayList<ASTNode>();
      for (Node exp : child.getChildren()) {
        expressions.add((ASTNode) exp);
      }
      joinTree.getExpressions().add(expressions);
      // UNIQUEJOIN carries no per-table filters; register empty lists.
      joinTree.getFilters().add(filt);
      joinTree.getFiltersForPushing().add(filters);
      break;
    case HiveParser.KW_PRESERVE:
      // Applies to the table reference that follows.
      lastPreserved = true;
      break;
    case HiveParser.TOK_SUBQUERY:
      throw new SemanticException(
          "Subqueries are not supported in UNIQUEJOIN");
    default:
      throw new SemanticException("Unexpected UNIQUEJOIN structure");
    }
  }
  joinTree.setBaseSrc(baseSrc.toArray(new String[0]));
  joinTree.setLeftAliases(leftAliases.toArray(new String[0]));
  joinTree.setRightAliases(rightAliases.toArray(new String[0]));
  // One JoinCond per table, carrying its PRESERVE flag.
  JoinCond[] condn = new JoinCond[preserved.size()];
  for (int i = 0; i < condn.length; i++) {
    condn[i] = new JoinCond(preserved.get(i));
  }
  joinTree.setJoinCond(condn);
  if (qb.getParseInfo().getHints() != null) {
    parseStreamTables(joinTree, qb);
  }
  return joinTree;
}
/*
 * Setup a QBJoinTree between a SubQuery and its Parent Query. The Parent Query
 * is the lhs of the Join.
 *
 * The Parent Query is represented by the last Operator needed to process its From Clause.
 * In case of a single table Query this will be a TableScan, but it can be a Join Operator
 * if the Parent Query contains Join clauses, or in case of a single source from clause,
 * the source could be a SubQuery or a PTF invocation.
 *
 * We setup the QBJoinTree with the above constraints in place. So:
 * - the lhs of the QBJoinTree can be another QBJoinTree if the Parent Query operator
 *   is a JoinOperator. In this case we get its QBJoinTree from the 'joinContext'
 * - the rhs is always a reference to the SubQuery. Its alias is obtained from the
 *   QBSubQuery object.
 *
 * The QBSubQuery also provides the Joining Condition AST. The Joining Condition has been
 * transformed in QBSubQuery setup, before this call. The Joining condition has any correlated
 * predicates and a predicate for joining the Parent Query expression with the SubQuery.
 *
 * The QBSubQuery also specifies what kind of Join to construct.
 *
 * Given this information, once we initialize the QBJoinTree, we call the 'parseJoinCondition'
 * method to validate and parse Join conditions.
 */
private QBJoinTree genSQJoinTree(QB qb, ISubQueryJoinInfo subQuery,
    Operator joiningOp,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBJoinTree joinTree = new QBJoinTree();
  // Translate the subquery's requested join type into a JoinCond.
  JoinCond[] condn = new JoinCond[1];
  switch (subQuery.getJoinType()) {
  case LEFTOUTER:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTOUTER);
    break;
  case RIGHTOUTER:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.RIGHTOUTER);
    break;
  case FULLOUTER:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.FULLOUTER);
    break;
  case LEFTSEMI:
    joinTree.setNoSemiJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTSEMI);
    break;
  default:
    condn[0] = new JoinCond(0, 1, JoinType.INNER);
    joinTree.setNoOuterJoin(true);
    break;
  }
  joinTree.setJoinCond(condn);
  if ( joiningOp instanceof JoinOperator ) {
    // Parent Query already ends in a join: chain its QBJoinTree as our lhs
    // and extend the left-alias list with the child's rightmost alias.
    QBJoinTree leftTree = joinContext.get(joiningOp);
    joinTree.setJoinSrc(leftTree);
    String[] leftChildAliases = leftTree.getLeftAliases();
    String leftAliases[] = new String[leftChildAliases.length + 1];
    for (int i = 0; i < leftChildAliases.length; i++) {
      leftAliases[i] = leftChildAliases[i];
    }
    leftAliases[leftChildAliases.length] = leftTree.getRightAliases()[0];
    joinTree.setLeftAliases(leftAliases);
  } else {
    // Single-source Parent Query: its alias is the lhs of the join.
    String alias = unescapeIdentifier(
        SubQueryUtils.getAlias(joiningOp, aliasToOpInfo).toLowerCase());
    joinTree.setLeftAlias(alias);
    String[] leftAliases = new String[1];
    leftAliases[0] = alias;
    joinTree.setLeftAliases(leftAliases);
    String[] children = new String[2];
    children[0] = alias;
    joinTree.setBaseSrc(children);
    joinTree.setId(qb.getId());
    joinTree.getAliasToOpInfo().put(
        getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
  }
  // The rhs is always the SubQuery itself.
  String rightalias = unescapeIdentifier(subQuery.getAlias().toLowerCase());
  String[] rightAliases = new String[1];
  rightAliases[0] = rightalias;
  joinTree.setRightAliases(rightAliases);
  String[] children = joinTree.getBaseSrc();
  if (children == null) {
    children = new String[2];
  }
  children[1] = rightalias;
  joinTree.setBaseSrc(children);
  joinTree.setId(qb.getId());
  joinTree.getAliasToOpInfo().put(
      getModifiedAlias(qb, rightalias), aliasToOpInfo.get(rightalias));
  // remember rhs table for semijoin
  if (joinTree.getNoSemiJoin() == false) {
    joinTree.addRHSSemijoin(rightalias);
  }
  // Initialize per-side expression/filter containers (two sides).
  ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<ArrayList<ASTNode>>();
  expressions.add(new ArrayList<ASTNode>());
  expressions.add(new ArrayList<ASTNode>());
  joinTree.setExpressions(expressions);
  ArrayList<Boolean> nullsafes = new ArrayList<Boolean>();
  joinTree.setNullSafes(nullsafes);
  ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
  filters.add(new ArrayList<ASTNode>());
  filters.add(new ArrayList<ASTNode>());
  joinTree.setFilters(filters);
  joinTree.setFilterMap(new int[2][]);
  ArrayList<ArrayList<ASTNode>> filtersForPushing =
      new ArrayList<ArrayList<ASTNode>>();
  filtersForPushing.add(new ArrayList<ASTNode>());
  filtersForPushing.add(new ArrayList<ASTNode>());
  joinTree.setFiltersForPushing(filtersForPushing);
  // Parse the (already-transformed) joining condition supplied by the subquery.
  ASTNode joinCond = subQuery.getJoinConditionAST();
  ArrayList<String> leftSrc = new ArrayList<String>();
  parseJoinCondition(joinTree, joinCond, leftSrc, aliasToOpInfo);
  if (leftSrc.size() == 1) {
    joinTree.setLeftAlias(leftSrc.get(0));
  }
  return joinTree;
}
/**
 * Builds a QBJoinTree from a JOIN parse node. The join type comes from the
 * node's token. The left child may be a table/subquery/PTF reference or a
 * nested join (whose tree becomes this tree's join source); the right child
 * must be a table/subquery/PTF reference. The join condition (third child)
 * is then parsed, and MAPJOIN/STREAMTABLE hints are recorded if present.
 *
 * @throws SemanticException on condition-parsing failure
 */
private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBJoinTree joinTree = new QBJoinTree();
  // Translate the parse token into the join condition type.
  JoinCond[] condn = new JoinCond[1];
  switch (joinParseTree.getToken().getType()) {
  case HiveParser.TOK_LEFTOUTERJOIN:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTOUTER);
    break;
  case HiveParser.TOK_RIGHTOUTERJOIN:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.RIGHTOUTER);
    break;
  case HiveParser.TOK_FULLOUTERJOIN:
    joinTree.setNoOuterJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.FULLOUTER);
    break;
  case HiveParser.TOK_LEFTSEMIJOIN:
    joinTree.setNoSemiJoin(false);
    condn[0] = new JoinCond(0, 1, JoinType.LEFTSEMI);
    break;
  default:
    condn[0] = new JoinCond(0, 1, JoinType.INNER);
    joinTree.setNoOuterJoin(true);
    break;
  }
  joinTree.setJoinCond(condn);
  ASTNode left = (ASTNode) joinParseTree.getChild(0);
  ASTNode right = (ASTNode) joinParseTree.getChild(1);
  if ((left.getToken().getType() == HiveParser.TOK_TABREF)
      || (left.getToken().getType() == HiveParser.TOK_SUBQUERY)
      || (left.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
    // Left side is a direct source: register its alias as the single lhs.
    String tableName = getUnescapedUnqualifiedTableName((ASTNode) left.getChild(0))
        .toLowerCase();
    String alias = extractJoinAlias(left, tableName);
    joinTree.setLeftAlias(alias);
    String[] leftAliases = new String[1];
    leftAliases[0] = alias;
    joinTree.setLeftAliases(leftAliases);
    String[] children = new String[2];
    children[0] = alias;
    joinTree.setBaseSrc(children);
    joinTree.setId(qb.getId());
    joinTree.getAliasToOpInfo().put(
        getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
  } else if (isJoinToken(left)) {
    // Left side is a nested join: recurse and chain the child tree as the
    // join source, extending left aliases with the child's rightmost alias.
    QBJoinTree leftTree = genJoinTree(qb, left, aliasToOpInfo);
    joinTree.setJoinSrc(leftTree);
    String[] leftChildAliases = leftTree.getLeftAliases();
    String leftAliases[] = new String[leftChildAliases.length + 1];
    for (int i = 0; i < leftChildAliases.length; i++) {
      leftAliases[i] = leftChildAliases[i];
    }
    leftAliases[leftChildAliases.length] = leftTree.getRightAliases()[0];
    joinTree.setLeftAliases(leftAliases);
  } else {
    assert (false);
  }
  if ((right.getToken().getType() == HiveParser.TOK_TABREF)
      || (right.getToken().getType() == HiveParser.TOK_SUBQUERY)
      || (right.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
    // Right side must be a direct source; register it as the single rhs.
    String tableName = getUnescapedUnqualifiedTableName((ASTNode) right.getChild(0))
        .toLowerCase();
    String alias = extractJoinAlias(right, tableName);
    String[] rightAliases = new String[1];
    rightAliases[0] = alias;
    joinTree.setRightAliases(rightAliases);
    String[] children = joinTree.getBaseSrc();
    if (children == null) {
      children = new String[2];
    }
    children[1] = alias;
    joinTree.setBaseSrc(children);
    joinTree.setId(qb.getId());
    joinTree.getAliasToOpInfo().put(
        getModifiedAlias(qb, alias), aliasToOpInfo.get(alias));
    // remember rhs table for semijoin
    if (joinTree.getNoSemiJoin() == false) {
      joinTree.addRHSSemijoin(alias);
    }
  } else {
    assert false;
  }
  // Initialize per-side expression/filter containers (two sides).
  ArrayList<ArrayList<ASTNode>> expressions = new ArrayList<ArrayList<ASTNode>>();
  expressions.add(new ArrayList<ASTNode>());
  expressions.add(new ArrayList<ASTNode>());
  joinTree.setExpressions(expressions);
  ArrayList<Boolean> nullsafes = new ArrayList<Boolean>();
  joinTree.setNullSafes(nullsafes);
  ArrayList<ArrayList<ASTNode>> filters = new ArrayList<ArrayList<ASTNode>>();
  filters.add(new ArrayList<ASTNode>());
  filters.add(new ArrayList<ASTNode>());
  joinTree.setFilters(filters);
  joinTree.setFilterMap(new int[2][]);
  ArrayList<ArrayList<ASTNode>> filtersForPushing =
      new ArrayList<ArrayList<ASTNode>>();
  filtersForPushing.add(new ArrayList<ASTNode>());
  filtersForPushing.add(new ArrayList<ASTNode>());
  joinTree.setFiltersForPushing(filtersForPushing);
  ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);
  ArrayList<String> leftSrc = new ArrayList<String>();
  parseJoinCondition(joinTree, joinCond, leftSrc, aliasToOpInfo);
  if (leftSrc.size() == 1) {
    joinTree.setLeftAlias(leftSrc.get(0));
  }
  // check the hints to see if the user has specified a map-side join. This
  // will be removed later on, once the cost-based
  // infrastructure is in place
  if (qb.getParseInfo().getHints() != null) {
    List<String> mapSideTables = getMapSideJoinTables(qb);
    List<String> mapAliases = joinTree.getMapAliases();
    for (String mapTbl : mapSideTables) {
      // The hinted table must appear among this tree's aliases to take effect.
      boolean mapTable = false;
      for (String leftAlias : joinTree.getLeftAliases()) {
        if (mapTbl.equalsIgnoreCase(leftAlias)) {
          mapTable = true;
        }
      }
      for (String rightAlias : joinTree.getRightAliases()) {
        if (mapTbl.equalsIgnoreCase(rightAlias)) {
          mapTable = true;
        }
      }
      if (mapTable) {
        if (mapAliases == null) {
          mapAliases = new ArrayList<String>();
        }
        mapAliases.add(mapTbl);
        joinTree.setMapSideJoin(true);
      }
    }
    joinTree.setMapAliases(mapAliases);
    parseStreamTables(joinTree, qb);
  }
  return joinTree;
}
/**
 * Determines the alias for a join input node. PTF nodes carry the alias as
 * their second child (presence already checked in processJoin); otherwise
 * the rightmost Identifier child wins, falling back to the table name.
 */
private String extractJoinAlias(ASTNode node, String tableName) {
  // ptf node form is:
  // ^(TOK_PTBLFUNCTION $name $alias? partitionTableFunctionSource partitioningSpec? expression*)
  if (node.getType() == HiveParser.TOK_PTBLFUNCTION) {
    return unescapeIdentifier(node.getChild(1).getText().toLowerCase());
  }
  String alias = tableName;
  // Scan children right-to-left for an explicit alias identifier.
  for (int i = node.getChildCount() - 1; i >= 1; i--) {
    if (node.getChild(i).getType() == HiveParser.Identifier) {
      alias = unescapeIdentifier(node.getChild(i).getText().toLowerCase());
      break;
    }
  }
  return alias;
}
/**
 * Collects the aliases named in STREAMTABLE hints and records them on the
 * join tree so those inputs are streamed rather than buffered in the join.
 */
private void parseStreamTables(QBJoinTree joinTree, QB qb) {
  List<String> streamAliases = joinTree.getStreamAliases();
  for (Node hintNode : qb.getParseInfo().getHints().getChildren()) {
    ASTNode hint = (ASTNode) hintNode;
    if (hint.getChild(0).getType() != HiveParser.TOK_STREAMTABLE) {
      continue;
    }
    ASTNode aliasList = (ASTNode) hint.getChild(1);
    for (int i = 0; i < aliasList.getChildCount(); i++) {
      // Lazily create the list so an absent hint leaves it untouched.
      if (streamAliases == null) {
        streamAliases = new ArrayList<String>();
      }
      streamAliases.add(aliasList.getChild(i).getText());
    }
  }
  joinTree.setStreamAliases(streamAliases);
}
/**
 * Merges {@code node} into {@code target}.
 *
 * @param node the join tree being absorbed
 * @param target the join tree that absorbs node
 * @param pos index within target (0 = left side, i+1 = i-th right alias)
 *        where node's left alias was matched
 * @param tgtToNodeExprMap mapping from target condition index to node
 *        condition index, used to reorder node's key expressions
 */
private void mergeJoins(QB qb, QBJoinTree node, QBJoinTree target, int pos, int[] tgtToNodeExprMap) {
  // Append node's right aliases after target's.
  String[] nodeRightAliases = node.getRightAliases();
  String[] trgtRightAliases = target.getRightAliases();
  String[] rightAliases = new String[nodeRightAliases.length
      + trgtRightAliases.length];
  for (int i = 0; i < trgtRightAliases.length; i++) {
    rightAliases[i] = trgtRightAliases[i];
  }
  for (int i = 0; i < nodeRightAliases.length; i++) {
    rightAliases[i + trgtRightAliases.length] = nodeRightAliases[i];
  }
  target.setRightAliases(rightAliases);
  target.getAliasToOpInfo().putAll(node.getAliasToOpInfo());
  // Append node's base sources, skipping node's position 0 (the shared alias).
  String[] nodeBaseSrc = node.getBaseSrc();
  String[] trgtBaseSrc = target.getBaseSrc();
  String[] baseSrc = new String[nodeBaseSrc.length + trgtBaseSrc.length - 1];
  for (int i = 0; i < trgtBaseSrc.length; i++) {
    baseSrc[i] = trgtBaseSrc[i];
  }
  for (int i = 1; i < nodeBaseSrc.length; i++) {
    baseSrc[i + trgtBaseSrc.length - 1] = nodeBaseSrc[i];
  }
  target.setBaseSrc(baseSrc);
  // Append node's right-side key expressions, reordered so they line up with
  // target's condition order (via tgtToNodeExprMap).
  ArrayList<ArrayList<ASTNode>> expr = target.getExpressions();
  for (int i = 0; i < nodeRightAliases.length; i++) {
    List<ASTNode> nodeConds = node.getExpressions().get(i + 1);
    ArrayList<ASTNode> reordereNodeConds = new ArrayList<ASTNode>();
    for(int k=0; k < tgtToNodeExprMap.length; k++) {
      reordereNodeConds.add(nodeConds.get(k));
    }
    expr.add(reordereNodeConds);
  }
  ArrayList<Boolean> nns = node.getNullSafes();
  ArrayList<Boolean> tns = target.getNullSafes();
  for (int i = 0; i < tns.size(); i++) {
    tns.set(i, tns.get(i) & nns.get(i)); // any of condition contains non-NS, non-NS
  }
  // Append node's right-side filters; node's left-side filters go to the
  // matched position in target.
  ArrayList<ArrayList<ASTNode>> filters = target.getFilters();
  for (int i = 0; i < nodeRightAliases.length; i++) {
    filters.add(node.getFilters().get(i + 1));
  }
  if (node.getFilters().get(0).size() != 0) {
    ArrayList<ASTNode> filterPos = filters.get(pos);
    filterPos.addAll(node.getFilters().get(0));
  }
  // Rebuild the filter map: shift node's indices past target's right aliases
  // (entry 0 stays only when it maps to target's left side at pos 0).
  int[][] nmap = node.getFilterMap();
  int[][] tmap = target.getFilterMap();
  int[][] newmap = new int[tmap.length + nmap.length - 1][];
  for (int[] mapping : nmap) {
    if (mapping != null) {
      for (int i = 0; i < mapping.length; i += 2) {
        if (pos > 0 || mapping[i] > 0) {
          mapping[i] += trgtRightAliases.length;
        }
      }
    }
  }
  if (nmap[0] != null) {
    // Merge node's position-0 filter map into target's matched position.
    if (tmap[pos] == null) {
      tmap[pos] = nmap[0];
    } else {
      int[] appended = new int[tmap[pos].length + nmap[0].length];
      System.arraycopy(tmap[pos], 0, appended, 0, tmap[pos].length);
      System.arraycopy(nmap[0], 0, appended, tmap[pos].length, nmap[0].length);
      tmap[pos] = appended;
    }
  }
  System.arraycopy(tmap, 0, newmap, 0, tmap.length);
  System.arraycopy(nmap, 1, newmap, tmap.length, nmap.length - 1);
  target.setFilterMap(newmap);
  ArrayList<ArrayList<ASTNode>> filter = target.getFiltersForPushing();
  for (int i = 0; i < nodeRightAliases.length; i++) {
    filter.add(node.getFiltersForPushing().get(i + 1));
  }
  if (node.getFiltersForPushing().get(0).size() != 0) {
    /*
     * for each predicate:
     * - does it refer to one or many aliases
     * - if one: add it to the filterForPushing list of that alias
     * - if many: add as a filter from merging trees.
     */
    for(ASTNode nodeFilter : node.getFiltersForPushing().get(0) ) {
      int fPos = ParseUtils.checkJoinFilterRefersOneAlias(target.getBaseSrc(), nodeFilter);
      if ( fPos != - 1 ) {
        filter.get(fPos).add(nodeFilter);
      } else {
        target.addPostJoinFilter(nodeFilter);
      }
    }
  }
  // The merged tree is outer/semi if either side was.
  if (node.getNoOuterJoin() && target.getNoOuterJoin()) {
    target.setNoOuterJoin(true);
  } else {
    target.setNoOuterJoin(false);
  }
  if (node.getNoSemiJoin() && target.getNoSemiJoin()) {
    target.setNoSemiJoin(true);
  } else {
    target.setNoSemiJoin(false);
  }
  target.mergeRHSSemijoin(node);
  // Append node's join conditions with left/right indices shifted: node's
  // left position 0 becomes pos, everything else moves past target's conds.
  JoinCond[] nodeCondns = node.getJoinCond();
  int nodeCondnsSize = nodeCondns.length;
  JoinCond[] targetCondns = target.getJoinCond();
  int targetCondnsSize = targetCondns.length;
  JoinCond[] newCondns = new JoinCond[nodeCondnsSize + targetCondnsSize];
  for (int i = 0; i < targetCondnsSize; i++) {
    newCondns[i] = targetCondns[i];
  }
  for (int i = 0; i < nodeCondnsSize; i++) {
    JoinCond nodeCondn = nodeCondns[i];
    if (nodeCondn.getLeft() == 0) {
      nodeCondn.setLeft(pos);
    } else {
      nodeCondn.setLeft(nodeCondn.getLeft() + targetCondnsSize);
    }
    nodeCondn.setRight(nodeCondn.getRight() + targetCondnsSize);
    newCondns[targetCondnsSize + i] = nodeCondn;
  }
  target.setJoinCond(newCondns);
  // Union map-join aliases when both trees are map-side joins.
  if (target.isMapSideJoin()) {
    assert node.isMapSideJoin();
    List<String> mapAliases = target.getMapAliases();
    for (String mapTbl : node.getMapAliases()) {
      if (!mapAliases.contains(mapTbl)) {
        mapAliases.add(mapTbl);
      }
    }
    target.setMapAliases(mapAliases);
  }
}
/**
 * Decides whether {@code node} can be merged into {@code target} and at
 * which position. A merge is possible when node's left alias matches either
 * target's left alias (position 0) or one of target's right aliases
 * (position i+1), AND the two condition lists contain the same expressions
 * (order-insensitive, compared by AST string form).
 *
 * @return a pair of (merge position, mapping from target condition index to
 *         node condition index); position -1 (with a null mapping) means no
 *         merge is possible
 */
private ObjectPair<Integer, int[]> findMergePos(QBJoinTree node, QBJoinTree target) {
  int res = -1;
  String leftAlias = node.getLeftAlias();
  if (leftAlias == null) {
    // Node's left side is not a single alias; nothing to anchor the merge on.
    return new ObjectPair<Integer, int[]>(-1, null);
  }
  ArrayList<ASTNode> nodeCondn = node.getExpressions().get(0);
  ArrayList<ASTNode> targetCondn = null;
  if (leftAlias.equals(target.getLeftAlias())) {
    targetCondn = target.getExpressions().get(0);
    res = 0;
  } else {
    // Search target's right aliases for the matching position.
    for (int i = 0; i < target.getRightAliases().length; i++) {
      if (leftAlias.equals(target.getRightAliases()[i])) {
        targetCondn = target.getExpressions().get(i + 1);
        res = i + 1;
        break;
      }
    }
  }
  if ( targetCondn == null ) {
    return new ObjectPair<Integer, int[]>(-1, null);
  }
  /*
   * The order of the join condition expressions don't matter.
   * A merge can happen:
   * - if every target condition is present in some position of the node condition list.
   * - there is no node condition, which is not equal to any target condition.
   */
  int[] tgtToNodeExprMap = new int[targetCondn.size()];
  boolean[] nodeFiltersMapped = new boolean[nodeCondn.size()];
  int i, j;
  for(i=0; i<targetCondn.size(); i++) {
    String tgtExprTree = targetCondn.get(i).toStringTree();
    tgtToNodeExprMap[i] = -1;
    for(j=0; j < nodeCondn.size(); j++) {
      if ( nodeCondn.get(j).toStringTree().equals(tgtExprTree)) {
        tgtToNodeExprMap[i] = j;
        nodeFiltersMapped[j] = true;
      }
    }
    if ( tgtToNodeExprMap[i] == -1) {
      // A target condition with no counterpart in node: cannot merge.
      return new ObjectPair<Integer, int[]>(-1, null);
    }
  }
  for(j=0; j < nodeCondn.size(); j++) {
    if ( !nodeFiltersMapped[j]) {
      // A node condition with no counterpart in target: cannot merge.
      return new ObjectPair<Integer, int[]>(-1, null);
    }
  }
  return new ObjectPair<Integer, int[]>(res, tgtToNodeExprMap);
}
// try merge join tree from inner most source
// (it was merged from outer most to inner, which could be invalid)
//
// in a join tree ((A-B)-C)-D where C is not mergeable with A-B,
// D can be merged with A-B into single join If and only if C and D has same join type
// In this case, A-B-D join will be executed first and ABD-C join will be executed in next
private void mergeJoinTree(QB qb) {
  QBJoinTree tree = qb.getQbJoinTree();
  if (tree.getJoinSrc() == null) {
    // Single join: nothing to merge.
    return;
  }
  // make array with QBJoinTree : outer most(0) --> inner most(n)
  List<QBJoinTree> trees = new ArrayList<QBJoinTree>();
  for (;tree != null; tree = tree.getJoinSrc()) {
    trees.add(tree);
  }
  // merging from 'target'(inner) to 'node'(outer)
  for (int i = trees.size() - 1; i >= 0; i--) {
    QBJoinTree target = trees.get(i);
    if (target == null) {
      // Already absorbed into an inner tree.
      continue;
    }
    JoinType prevType = null; // save join type
    for (int j = i - 1; j >= 0; j--) {
      QBJoinTree node = trees.get(j);
      if (node == null) {
        continue;
      }
      JoinType currType = getType(node.getJoinCond());
      if (prevType != null && prevType != currType) {
        // Can't jump over a differently-typed, unmergeable join.
        break;
      }
      ObjectPair<Integer, int[]> mergeDetails = findMergePos(node, target);
      int pos = mergeDetails.getFirst();
      if (pos >= 0) {
        // for outer joins, it should not exceed 16 aliases (short type)
        if (!node.getNoOuterJoin() || !target.getNoOuterJoin()) {
          if (node.getRightAliases().length + target.getRightAliases().length + 1 > 16) {
            LOG.info(ErrorMsg.JOINNODE_OUTERJOIN_MORETHAN_16);
            continue;
          }
        }
        mergeJoins(qb, node, target, pos, mergeDetails.getSecond());
        // Mark the absorbed tree so it is skipped and later removed.
        trees.set(j, null);
        continue; // continue merging with next alias
      }
      if (prevType == null) {
        prevType = currType;
      }
    }
  }
  // reconstruct join tree
  QBJoinTree current = null;
  for (int i = 0; i < trees.size(); i++) {
    QBJoinTree target = trees.get(i);
    if (target == null) {
      continue;
    }
    if (current == null) {
      // Outermost surviving tree becomes the query block's join tree.
      qb.setQbJoinTree(current = target);
    } else {
      current.setJoinSrc(target);
      current = target;
    }
  }
}
/**
 * Returns the join type shared by all the given conditions, or null when
 * they differ (mixed-type trees must not be merged).
 */
private JoinType getType(JoinCond[] conds) {
  JoinType first = conds[0].getJoinType();
  for (int i = 1; i < conds.length; i++) {
    if (conds[i].getJoinType() != first) {
      return null;
    }
  }
  return first;
}
/**
 * Inserts a pass-through SELECT over every column of {@code input}. The ColumnPruner
 * can later narrow this SELECT to reduce the data shuffled before a group-by.
 *
 * @param input the operator to wrap
 * @return the new select operator, registered in the op-parse-context map
 */
private Operator insertSelectAllPlanForGroupBy(Operator input)
    throws SemanticException {
  OpParseContext inputCtx = opParseCtx.get(input);
  RowResolver inputRR = inputCtx.getRowResolver();
  ArrayList<ColumnInfo> columns = inputRR.getColumnInfos();
  ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
  ArrayList<String> columnNames = new ArrayList<String>();
  Map<String, ExprNodeDesc> columnExprMap = new HashMap<String, ExprNodeDesc>();
  for (ColumnInfo col : columns) {
    String internalName = col.getInternalName();
    // one column reference for the select list ...
    colList.add(new ExprNodeColumnDesc(col.getType(), internalName,
        col.getTabAlias(), col.getIsVirtualCol()));
    columnNames.add(internalName);
    // ... and an independent one for the column-expression map
    columnExprMap.put(internalName,
        new ExprNodeColumnDesc(col.getType(), internalName,
            col.getTabAlias(), col.getIsVirtualCol()));
  }
  Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new SelectDesc(colList, columnNames, true),
      new RowSchema(inputRR.getColumnInfos()), input), inputRR);
  output.setColumnExprMap(columnExprMap);
  return output;
}
// Return the common distinct expression
// There should be more than 1 destination, with group bys in all of them.
/**
 * Returns the list of distinct-aggregation parameter ASTs shared by every
 * destination clause of the query block, or {@code null} when the clauses cannot
 * share a common distinct spray (grouping sets, filters, no aggregation/group-by,
 * missing or mismatching distinct expressions, or fewer than two destinations).
 *
 * Improvement over the previous version: the comparison of the current clause's
 * distinct expressions against the first clause's was nested inside the
 * per-aggregation-parameter loop, re-running a loop-invariant list comparison once
 * per parameter; it is now performed once per destination with identical results.
 */
private List<ASTNode> getCommonDistinctExprs(QB qb, Operator input) {
  QBParseInfo qbp = qb.getParseInfo();
  // If a grouping set aggregation is present, common processing is not possible
  if (!qbp.getDestCubes().isEmpty() || !qbp.getDestRollups().isEmpty()
      || !qbp.getDestToLateralView().isEmpty()) {
    return null;
  }
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  TreeSet<String> ks = new TreeSet<String>();
  ks.addAll(qbp.getClauseNames());
  // Go over all the destination tables; a single destination gains nothing
  if (ks.size() <= 1) {
    return null;
  }
  List<ExprNodeDesc> oldList = null;
  List<ASTNode> oldASTList = null;
  for (String dest : ks) {
    // If a filter is present, common processing is not possible
    if (qbp.getWhrForClause(dest) != null) {
      return null;
    }
    // the clause must aggregate or group by something
    if (qbp.getAggregationExprsForClause(dest).size() == 0
        && getGroupByForClause(qbp, dest).size() == 0) {
      return null;
    }
    // All distinct expressions must be the same
    List<ASTNode> list = qbp.getDistinctFuncExprsForClause(dest);
    if (list.isEmpty()) {
      return null;
    }
    List<ExprNodeDesc> currDestList;
    try {
      currDestList = getDistinctExprs(qbp, dest, inputRR);
    } catch (SemanticException e) {
      // treat unresolvable expressions as "no common distinct"
      return null;
    }
    // collect the parameter ASTs of every distinct aggregation in this clause
    List<ASTNode> currASTList = new ArrayList<ASTNode>();
    for (ASTNode value : list) {
      // 0 is function name
      for (int i = 1; i < value.getChildCount(); i++) {
        currASTList.add((ASTNode) value.getChild(i));
      }
    }
    // compare against the first clause once per destination
    if (oldList == null) {
      oldList = currDestList;
      oldASTList = currASTList;
    } else if (!matchExprLists(oldList, currDestList)) {
      return null;
    }
  }
  return oldASTList;
}
/**
 * Builds a single ReduceSinkOperator shared by all destination clauses when they
 * aggregate over the same distinct expressions: the common distinct expressions
 * become the reduce keys, and every clause's group-by keys and aggregation
 * parameters are forwarded (deduplicated) as reduce values.
 *
 * NOTE(review): {@code distExprs} is dereferenced unconditionally; callers are
 * expected to have verified that getCommonDistinctExprs() is non-null first.
 */
private Operator createCommonReduceSink(QB qb, Operator input)
    throws SemanticException {
  // Go over all the tables and extract the common distinct key
  List<ASTNode> distExprs = getCommonDistinctExprs(qb, input);
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> ks = new TreeSet<String>();
  ks.addAll(qbp.getClauseNames());
  // Pass the entire row
  RowResolver inputRR = opParseCtx.get(input).getRowResolver();
  RowResolver reduceSinkOutputRowResolver = new RowResolver();
  reduceSinkOutputRowResolver.setIsExprResolver(true);
  ArrayList<ExprNodeDesc> reduceKeys = new ArrayList<ExprNodeDesc>();
  ArrayList<ExprNodeDesc> reduceValues = new ArrayList<ExprNodeDesc>();
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
  // Pre-compute distinct group-by keys and store in reduceKeys
  List<String> outputColumnNames = new ArrayList<String>();
  for (ASTNode distn : distExprs) {
    ExprNodeDesc distExpr = genExprNodeDesc(distn, inputRR);
    // each distinct expression contributes a key only once
    if (reduceSinkOutputRowResolver.getExpression(distn) == null) {
      reduceKeys.add(distExpr);
      outputColumnNames.add(getColumnInternalName(reduceKeys.size() - 1));
      String field = Utilities.ReduceField.KEY.toString() + "."
          + getColumnInternalName(reduceKeys.size() - 1);
      ColumnInfo colInfo = new ColumnInfo(field, reduceKeys.get(
          reduceKeys.size() - 1).getTypeInfo(), "", false);
      reduceSinkOutputRowResolver.putExpression(distn, colInfo);
      colExprMap.put(colInfo.getInternalName(), distExpr);
    }
  }
  // Go over all the grouping keys and aggregations
  for (String dest : ks) {
    List<ASTNode> grpByExprs = getGroupByForClause(qbp, dest);
    for (int i = 0; i < grpByExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByExprs.get(i);
      // group-by keys shared between clauses are forwarded only once
      if (reduceSinkOutputRowResolver.getExpression(grpbyExpr) == null) {
        ExprNodeDesc grpByExprNode = genExprNodeDesc(grpbyExpr, inputRR);
        reduceValues.add(grpByExprNode);
        String field = Utilities.ReduceField.VALUE.toString() + "."
            + getColumnInternalName(reduceValues.size() - 1);
        ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(
            reduceValues.size() - 1).getTypeInfo(), "", false);
        reduceSinkOutputRowResolver.putExpression(grpbyExpr, colInfo);
        outputColumnNames.add(getColumnInternalName(reduceValues.size() - 1));
        colExprMap.put(field, grpByExprNode);
      }
    }
    // For each aggregation
    HashMap<String, ASTNode> aggregationTrees = qbp
        .getAggregationExprsForClause(dest);
    assert (aggregationTrees != null);
    for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
      ASTNode value = entry.getValue();
      // 0 is the function name
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode paraExpr = (ASTNode) value.getChild(i);
        // aggregation parameters already forwarded (e.g. as distinct keys or
        // by another clause) are not duplicated
        if (reduceSinkOutputRowResolver.getExpression(paraExpr) == null) {
          ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRR);
          reduceValues.add(paraExprNode);
          String field = Utilities.ReduceField.VALUE.toString() + "."
              + getColumnInternalName(reduceValues.size() - 1);
          ColumnInfo colInfo = new ColumnInfo(field, reduceValues.get(
              reduceValues.size() - 1).getTypeInfo(), "", false);
          reduceSinkOutputRowResolver.putExpression(paraExpr, colInfo);
          outputColumnNames
              .add(getColumnInternalName(reduceValues.size() - 1));
          colExprMap.put(field, paraExprNode);
        }
      }
    }
  }
  // NOTE(review): the numeric args (-1, reduceKeys.size(), -1) follow
  // PlanUtils.getReduceSinkDesc's signature - verify against that API if changed.
  ReduceSinkOperator rsOp = (ReduceSinkOperator) putOpInsertMap(
      OperatorFactory.getAndMakeChild(PlanUtils.getReduceSinkDesc(reduceKeys,
          reduceValues, outputColumnNames, true, -1, reduceKeys.size(), -1),
          new RowSchema(reduceSinkOutputRowResolver.getColumnInfos()), input),
      reduceSinkOutputRowResolver);
  rsOp.setColumnExprMap(colExprMap);
  return rsOp;
}
// Groups the clause names into lists so that any two clauses in the same list has the same
// group by and distinct keys and no clause appears in more than one list. Returns a list of the
// lists of clauses.
private List<List<String>> getCommonGroupByDestGroups(QB qb,
    Map<String, Operator<? extends OperatorDesc>> inputs) throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> ks = new TreeSet<String>();
  ks.addAll(qbp.getClauseNames());
  List<List<String>> commonGroupByDestGroups = new ArrayList<List<String>>();
  // If this is a trivial query block return
  if (ks.size() <= 1) {
    List<String> oneList = new ArrayList<String>(1);
    if (ks.size() == 1) {
      oneList.add(ks.first());
    }
    commonGroupByDestGroups.add(oneList);
    return commonGroupByDestGroups;
  }
  // Parallel lists, indexed together: the input operator, the spray (group-by)
  // keys, and the distinct keys of each group discovered so far.
  List<Operator<? extends OperatorDesc>> inputOperators =
      new ArrayList<Operator<? extends OperatorDesc>>(ks.size());
  List<List<ExprNodeDesc>> sprayKeyLists = new ArrayList<List<ExprNodeDesc>>(ks.size());
  List<List<ExprNodeDesc>> distinctKeyLists = new ArrayList<List<ExprNodeDesc>>(ks.size());
  // Iterate over each clause
  for (String dest : ks) {
    Operator input = inputs.get(dest);
    RowResolver inputRR = opParseCtx.get(input).getRowResolver();
    List<ExprNodeDesc> distinctKeys = getDistinctExprs(qbp, dest, inputRR);
    List<ExprNodeDesc> sprayKeys = new ArrayList<ExprNodeDesc>();
    // Add the group by expressions
    List<ASTNode> grpByExprs = getGroupByForClause(qbp, dest);
    for (ASTNode grpByExpr : grpByExprs) {
      ExprNodeDesc exprDesc = genExprNodeDesc(grpByExpr, inputRR);
      if (ExprNodeDescUtils.indexOf(exprDesc, sprayKeys) < 0) {
        sprayKeys.add(exprDesc);
      }
    }
    // Loop through each of the lists of exprs, looking for a match
    boolean found = false;
    for (int i = 0; i < sprayKeyLists.size(); i++) {
      // clauses can only share a group when they read from the same input operator
      if (!input.equals(inputOperators.get(i))) {
        continue;
      }
      if (distinctKeys.isEmpty()) {
        // current dest has no distinct keys.
        // It matches a group whose combined spray+distinct keys equal its spray keys.
        List<ExprNodeDesc> combinedList = new ArrayList<ExprNodeDesc>();
        combineExprNodeLists(sprayKeyLists.get(i), distinctKeyLists.get(i), combinedList);
        if (!matchExprLists(combinedList, sprayKeys)) {
          continue;
        } // else do the common code at the end.
      } else {
        if (distinctKeyLists.get(i).isEmpty()) {
          // symmetric case: the existing group has no distinct keys, this dest does
          List<ExprNodeDesc> combinedList = new ArrayList<ExprNodeDesc>();
          combineExprNodeLists(sprayKeys, distinctKeys, combinedList);
          if (!matchExprLists(combinedList, sprayKeyLists.get(i))) {
            continue;
          } else {
            // we have found a match. insert this distinct clause to head.
            // The group's keys are replaced by this clause's (which carry the
            // distinct information) and the clause is put first in the group.
            distinctKeyLists.remove(i);
            sprayKeyLists.remove(i);
            distinctKeyLists.add(i, distinctKeys);
            sprayKeyLists.add(i, sprayKeys);
            commonGroupByDestGroups.get(i).add(0, dest);
            found = true;
            break;
          }
        } else {
          // both sides have distinct keys: both key lists must match exactly
          if (!matchExprLists(distinctKeyLists.get(i), distinctKeys)) {
            continue;
          }
          if (!matchExprLists(sprayKeyLists.get(i), sprayKeys)) {
            continue;
          }
          // else do common code
        }
      }
      // common code
      // A match was found, so add the clause to the corresponding list
      commonGroupByDestGroups.get(i).add(dest);
      found = true;
      break;
    }
    // No match was found, so create new entries
    if (!found) {
      inputOperators.add(input);
      sprayKeyLists.add(sprayKeys);
      distinctKeyLists.add(distinctKeys);
      List<String> destGroup = new ArrayList<String>();
      destGroup.add(dest);
      commonGroupByDestGroups.add(destGroup);
    }
  }
  return commonGroupByDestGroups;
}
/**
 * Appends all of {@code list} and then every member of {@code list2} that is not
 * already present (by semantic comparison) into {@code combinedList}.
 *
 * Fix: the previous version used {@code List.contains}, which relies on
 * {@code equals}; the sibling methods (matchExprLists, getDistinctExprs,
 * getCommonGroupByDestGroups) all compare descriptors with
 * {@code ExprNodeDescUtils.indexOf}. Using the same semantic comparison here makes
 * deduplication consistent and catches equivalent descriptors that are distinct
 * objects.
 *
 * @param list         first list, copied wholesale
 * @param list2        second list, deduplicated against the combined result
 * @param combinedList output accumulator (appended to, not cleared)
 */
private void combineExprNodeLists(List<ExprNodeDesc> list, List<ExprNodeDesc> list2,
    List<ExprNodeDesc> combinedList) {
  combinedList.addAll(list);
  for (ExprNodeDesc elem : list2) {
    if (ExprNodeDescUtils.indexOf(elem, combinedList) < 0) {
      combinedList.add(elem);
    }
  }
}
/**
 * Returns whether the two expression lists contain the same elements,
 * independent of order: the sizes must be equal and every element of
 * {@code list1} must occur somewhere in {@code list2} (semantic comparison
 * via ExprNodeDescUtils.indexOf).
 */
private boolean matchExprLists(List<ExprNodeDesc> list1, List<ExprNodeDesc> list2) {
  if (list1.size() != list2.size()) {
    return false;
  }
  for (int i = 0; i < list1.size(); i++) {
    if (ExprNodeDescUtils.indexOf(list1.get(i), list2) < 0) {
      return false;
    }
  }
  return true;
}
/**
 * Resolves the parameters of every distinct aggregation of the given clause into
 * expression descriptors, skipping semantic duplicates.
 *
 * @param dest    clause name
 * @param inputRR row resolver used to resolve the parameter ASTs
 * @return the deduplicated distinct expressions (possibly empty)
 */
private List<ExprNodeDesc> getDistinctExprs(QBParseInfo qbp, String dest, RowResolver inputRR)
    throws SemanticException {
  List<ExprNodeDesc> result = new ArrayList<ExprNodeDesc>();
  for (ASTNode distinctAggExpr : qbp.getDistinctFuncExprsForClause(dest)) {
    // child 0 is the function name; parameters start at index 1
    int childCount = distinctAggExpr.getChildCount();
    for (int idx = 1; idx < childCount; idx++) {
      ExprNodeDesc expr = genExprNodeDesc((ASTNode) distinctAggExpr.getChild(idx), inputRR);
      if (ExprNodeDescUtils.indexOf(expr, result) < 0) {
        result.add(expr);
      }
    }
  }
  return result;
}
/**
 * Reports whether any destination clause of the query block contains a
 * distinct aggregation expression.
 */
private boolean distinctExprsExists(QB qb) {
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> clauses = new TreeSet<String>();
  clauses.addAll(qbp.getClauseNames());
  for (String dest : clauses) {
    if (!qbp.getDistinctFuncExprsForClause(dest).isEmpty()) {
      return true;
    }
  }
  return false;
}
/**
 * Generates the operator plan for the body of the query block: per-destination
 * filter, group-by, select, limit and file-sink operators, choosing between the
 * common-distinct optimization, the shared-reducer multi-group-by path, and the
 * standard per-destination path.
 *
 * @param qb            the query block
 * @param input         the operator feeding the body (e.g. join or table scan)
 * @param aliasToOpInfo alias-to-operator map used for filter/having generation
 * @return the last operator generated (for multi-insert this is the operator of
 *         the final destination processed)
 */
@SuppressWarnings("nls")
private Operator genBodyPlan(QB qb, Operator input, Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
  Map<String, Operator<? extends OperatorDesc>> inputs = createInputForDests(qb, input, ks);
  // For multi-group by with the same distinct, we ignore all user hints
  // currently. It doesnt matter whether he has asked to do
  // map-side aggregation or not. Map side aggregation is turned off
  List<ASTNode> commonDistinctExprs = getCommonDistinctExprs(qb, input);
  // Consider a query like:
  //
  //  from src
  //    insert overwrite table dest1 select col1, count(distinct colx) group by col1
  //    insert overwrite table dest2 select col2, count(distinct colx) group by col2;
  //
  // With HIVE_OPTIMIZE_MULTI_GROUPBY_COMMON_DISTINCTS set to true, first we spray by the distinct
  // value (colx), and then perform the 2 groups bys. This makes sense if map-side aggregation is
  // turned off. However, with maps-side aggregation, it might be useful in some cases to treat
  // the 2 inserts independently, thereby performing the query above in 2MR jobs instead of 3
  // (due to spraying by distinct key first).
  boolean optimizeMultiGroupBy = commonDistinctExprs != null &&
      conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_MULTI_GROUPBY_COMMON_DISTINCTS);
  Operator curr = input;
  // if there is a single distinct, optimize that. Spray initially by the
  // distinct key,
  // no computation at the mapper. Have multiple group by operators at the
  // reducer - and then
  // proceed
  if (optimizeMultiGroupBy) {
    // shared reduce sink keyed by the common distinct expressions
    curr = createCommonReduceSink(qb, input);
    RowResolver currRR = opParseCtx.get(curr).getRowResolver();
    // create a forward operator
    input = putOpInsertMap(OperatorFactory.getAndMakeChild(new ForwardDesc(),
        new RowSchema(currRR.getColumnInfos()), curr), currRR);
    // each destination gets its own group-by/select/limit/file-sink chain
    // hanging off the shared forward operator
    for (String dest : ks) {
      curr = input;
      curr = genGroupByPlan2MRMultiGroupBy(dest, qb, curr);
      curr = genSelectPlan(dest, qb, curr);
      Integer limit = qbp.getDestLimit(dest);
      if (limit != null) {
        curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(), true);
        qb.getParseInfo().setOuterQueryLimit(limit.intValue());
      }
      curr = genFileSinkPlan(dest, qb, curr);
    }
  } else {
    List<List<String>> commonGroupByDestGroups = null;
    // If we can put multiple group bys in a single reducer, determine suitable groups of
    // expressions, otherwise treat all the expressions as a single group
    if (conf.getBoolVar(HiveConf.ConfVars.HIVEMULTIGROUPBYSINGLEREDUCER)) {
      try {
        commonGroupByDestGroups = getCommonGroupByDestGroups(qb, inputs);
      } catch (SemanticException e) {
        // fall back to treating every clause independently
        LOG.error("Failed to group clauses by common spray keys.", e);
      }
    }
    if (commonGroupByDestGroups == null) {
      // single group containing all clauses
      commonGroupByDestGroups = new ArrayList<List<String>>();
      commonGroupByDestGroups.add(new ArrayList<String>(ks));
    }
    if (!commonGroupByDestGroups.isEmpty()) {
      // Iterate over each group of subqueries with the same group by/distinct keys
      for (List<String> commonGroupByDestGroup : commonGroupByDestGroups) {
        if (commonGroupByDestGroup.isEmpty()) {
          continue;
        }
        String firstDest = commonGroupByDestGroup.get(0);
        input = inputs.get(firstDest);
        // Constructs a standard group by plan if:
        // There is no other subquery with the same group by/distinct keys or
        // (There are no aggregations in a representative query for the group and
        // There is no group by in that representative query) or
        // The data is skewed or
        // The conf variable used to control combining group bys into a single reducer is false
        if (commonGroupByDestGroup.size() == 1 ||
            (qbp.getAggregationExprsForClause(firstDest).size() == 0 &&
            getGroupByForClause(qbp, firstDest).size() == 0) ||
            conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW) ||
            !conf.getBoolVar(HiveConf.ConfVars.HIVEMULTIGROUPBYSINGLEREDUCER)) {
          // Go over all the destination tables
          for (String dest : commonGroupByDestGroup) {
            curr = inputs.get(dest);
            // per-destination WHERE filter
            if (qbp.getWhrForClause(dest) != null) {
              ASTNode whereExpr = qb.getParseInfo().getWhrForClause(dest);
              curr = genFilterPlan((ASTNode) whereExpr.getChild(0), qb, curr, aliasToOpInfo, false);
            }
            if (qbp.getAggregationExprsForClause(dest).size() != 0
                || getGroupByForClause(qbp, dest).size() > 0) {
              // multiple distincts is not supported with skew in data
              if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW) &&
                  qbp.getDistinctFuncExprsForClause(dest).size() > 1) {
                throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.
                    getMsg());
              }
              // insert a select operator here used by the ColumnPruner to reduce
              // the data to shuffle
              curr = insertSelectAllPlanForGroupBy(curr);
              // choose the group-by strategy from the map-side-aggregation
              // and skew configuration flags
              if (conf.getBoolVar(HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE)) {
                if (!conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
                  curr = genGroupByPlanMapAggrNoSkew(dest, qb, curr);
                } else {
                  curr = genGroupByPlanMapAggr2MR(dest, qb, curr);
                }
              } else if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
                curr = genGroupByPlan2MR(dest, qb, curr);
              } else {
                curr = genGroupByPlan1MR(dest, qb, curr);
              }
            }
            curr = genPostGroupByBodyPlan(curr, dest, qb, aliasToOpInfo);
          }
        } else {
          // group of compatible clauses: one reducer handles all their group-bys
          curr = genGroupByPlan1ReduceMultiGBY(commonGroupByDestGroup, qb, input, aliasToOpInfo);
        }
      }
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Body Plan for Query Block " + qb.getId());
  }
  return curr;
}
/**
 * Builds the per-destination input map by generating any lateral-view plan for
 * each destination clause on top of the shared input operator.
 */
private Map<String, Operator<? extends OperatorDesc>> createInputForDests(QB qb,
    Operator<? extends OperatorDesc> input, Set<String> dests) throws SemanticException {
  Map<String, Operator<? extends OperatorDesc>> result =
      new HashMap<String, Operator<? extends OperatorDesc>>();
  for (String dest : dests) {
    result.put(dest, genLateralViewPlanForDest(dest, qb, input));
  }
  return result;
}
/**
 * Generates the post-group-by part of a destination's plan: HAVING, windowing,
 * SELECT, cluster/distribute/order/sort-by reduce sink, LIMIT and file sink
 * (for non-subqueries), then re-aliases the output row resolver when the query
 * block has an alias.
 */
private Operator genPostGroupByBodyPlan(Operator curr, String dest, QB qb,
    Map<String, Operator> aliasToOpInfo)
    throws SemanticException {
  QBParseInfo qbp = qb.getParseInfo();
  // Insert HAVING plan here
  if (qbp.getHavingForClause(dest) != null) {
    if (getGroupByForClause(qbp, dest).size() == 0) {
      throw new SemanticException("HAVING specified without GROUP BY");
    }
    curr = genHavingPlan(dest, qb, curr, aliasToOpInfo);
  }
  // windowing functions, if any, are evaluated before the final SELECT
  if (queryProperties.hasWindowing() && qb.getWindowingSpec(dest) != null) {
    curr = genWindowingPlan(qb.getWindowingSpec(dest), curr);
  }
  curr = genSelectPlan(dest, qb, curr);
  Integer limit = qbp.getDestLimit(dest);
  // Expressions are not supported currently without a alias.
  // Reduce sink is needed if the query contains a cluster by, distribute by,
  // order by or a sort by clause.
  boolean genReduceSink = false;
  // Currently, expressions are not allowed in cluster by, distribute by,
  // order by or a sort by clause. For each of the above clause types, check
  // if the clause contains any expression.
  if (qbp.getClusterByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (qbp.getDistributeByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (qbp.getOrderByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (qbp.getSortByForClause(dest) != null) {
    genReduceSink = true;
  }
  if (genReduceSink) {
    int numReducers = -1;
    // Use only 1 reducer if order by is present
    if (qbp.getOrderByForClause(dest) != null) {
      numReducers = 1;
    }
    curr = genReduceSinkPlan(dest, qb, curr, numReducers);
  }
  if (qbp.getIsSubQ()) {
    if (limit != null) {
      // In case of order by, only 1 reducer is used, so no need of
      // another shuffle
      curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(), qbp
          .getOrderByForClause(dest) != null ? false : true);
    }
  } else {
    curr = genConversionOps(dest, qb, curr);
    // exact limit can be taken care of by the fetch operator
    if (limit != null) {
      boolean extraMRStep = true;
      // no extra MR step when order-by already forced one reducer, or when a
      // simple fetch query has neither cluster-by nor sort-by
      if (qbp.getOrderByForClause(dest) != null ||
          qb.getIsQuery() && qbp.getClusterByForClause(dest) == null &&
          qbp.getSortByForClause(dest) == null) {
        extraMRStep = false;
      }
      curr = genLimitMapRedPlan(dest, qb, curr, limit.intValue(),
          extraMRStep);
      qb.getParseInfo().setOuterQueryLimit(limit.intValue());
    }
    // CREATE VIEW does not materialize output, so no file sink is produced
    if (!SessionState.get().getHiveOperation().equals(HiveOperation.CREATEVIEW)) {
      curr = genFileSinkPlan(dest, qb, curr);
    }
  }
  // change curr ops row resolver's tab aliases to query alias if it
  // exists
  if (qb.getParseInfo().getAlias() != null) {
    RowResolver rr = opParseCtx.get(curr).getRowResolver();
    RowResolver newRR = new RowResolver();
    String alias = qb.getParseInfo().getAlias();
    for (ColumnInfo colInfo : rr.getColumnInfos()) {
      String name = colInfo.getInternalName();
      String[] tmp = rr.reverseLookup(name);
      if ("".equals(tmp[0]) || tmp[1] == null) {
        // ast expression is not a valid column name for table
        tmp[1] = colInfo.getInternalName();
      }
      newRR.put(alias, tmp[1], colInfo);
    }
    opParseCtx.get(curr).setRowResolver(newRR);
  }
  return curr;
}
/**
 * Generates the plan for a UNION of two child operators: validates that both
 * sides have matching schemas (same field names, positions and union-compatible
 * types), inserts cast-selects where column types differ, and either merges into
 * an existing UnionOperator child or creates a new one.
 *
 * @throws SemanticException if the two sides' schemas do not match
 */
@SuppressWarnings("nls")
private Operator genUnionPlan(String unionalias, String leftalias,
    Operator leftOp, String rightalias, Operator rightOp)
    throws SemanticException {
  // Currently, the unions are not merged - each union has only 2 parents. So,
  // a n-way union will lead to (n-1) union operators.
  // This can be easily merged into 1 union
  RowResolver leftRR = opParseCtx.get(leftOp).getRowResolver();
  RowResolver rightRR = opParseCtx.get(rightOp).getRowResolver();
  HashMap<String, ColumnInfo> leftmap = leftRR.getFieldMap(leftalias);
  HashMap<String, ColumnInfo> rightmap = rightRR.getFieldMap(rightalias);
  // make sure the schemas of both sides are the same
  // (tabref is only used to attach position info to error messages)
  ASTNode tabref = qb.getAliases().isEmpty() ? null :
      qb.getParseInfo().getSrcForAlias(qb.getAliases().get(0));
  if (leftmap.size() != rightmap.size()) {
    throw new SemanticException("Schema of both sides of union should match.");
  }
  for (Map.Entry<String, ColumnInfo> lEntry : leftmap.entrySet()) {
    String field = lEntry.getKey();
    ColumnInfo lInfo = lEntry.getValue();
    ColumnInfo rInfo = rightmap.get(field);
    if (rInfo == null) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match. " + rightalias
          + " does not have the field " + field));
    }
    if (lInfo == null) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match. " + leftalias
          + " does not have the field " + field));
    }
    // internal names encode column positions; they must line up on both sides
    if (!lInfo.getInternalName().equals(rInfo.getInternalName())) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match: field " + field + ":"
          + " appears on the left side of the UNION at column position: " +
          getPositionFromInternalName(lInfo.getInternalName())
          + ", and on the right side of the UNION at column position: " +
          getPositionFromInternalName(rInfo.getInternalName())
          + ". Column positions should match for a UNION"));
    }
    // try widening coversion, otherwise fail union
    TypeInfo commonTypeInfo = FunctionRegistry.getCommonClassForUnionAll(lInfo.getType(),
        rInfo.getType());
    if (commonTypeInfo == null) {
      throw new SemanticException(generateErrorMessage(tabref,
          "Schema of both sides of union should match: Column " + field
          + " is of type " + lInfo.getType().getTypeName()
          + " on first table and type " + rInfo.getType().getTypeName()
          + " on second table"));
    }
  }
  // construct the forward operator
  // the union's output columns take the widened common type of each pair
  RowResolver unionoutRR = new RowResolver();
  for (Map.Entry<String, ColumnInfo> lEntry : leftmap.entrySet()) {
    String field = lEntry.getKey();
    ColumnInfo lInfo = lEntry.getValue();
    ColumnInfo rInfo = rightmap.get(field);
    ColumnInfo unionColInfo = new ColumnInfo(lInfo);
    unionColInfo.setType(FunctionRegistry.getCommonClassForUnionAll(lInfo.getType(),
        rInfo.getType()));
    unionoutRR.put(unionalias, field, unionColInfo);
  }
  // insert cast-selects so each (non-union) input produces the union's types
  if (!(leftOp instanceof UnionOperator)) {
    leftOp = genInputSelectForUnion(leftOp, leftmap, leftalias, unionoutRR, unionalias);
  }
  if (!(rightOp instanceof UnionOperator)) {
    rightOp = genInputSelectForUnion(rightOp, rightmap, rightalias, unionoutRR, unionalias);
  }
  // If one of the children is a union, merge with it
  // else create a new one
  if ((leftOp instanceof UnionOperator) || (rightOp instanceof UnionOperator)) {
    if (leftOp instanceof UnionOperator) {
      // make left a child of right
      List<Operator<? extends OperatorDesc>> child =
          new ArrayList<Operator<? extends OperatorDesc>>();
      child.add(leftOp);
      rightOp.setChildOperators(child);
      List<Operator<? extends OperatorDesc>> parent = leftOp
          .getParentOperators();
      parent.add(rightOp);
      // the existing union gains one more input branch
      UnionDesc uDesc = ((UnionOperator) leftOp).getConf();
      uDesc.setNumInputs(uDesc.getNumInputs() + 1);
      return putOpInsertMap(leftOp, unionoutRR);
    } else {
      // make right a child of left
      List<Operator<? extends OperatorDesc>> child =
          new ArrayList<Operator<? extends OperatorDesc>>();
      child.add(rightOp);
      leftOp.setChildOperators(child);
      List<Operator<? extends OperatorDesc>> parent = rightOp
          .getParentOperators();
      parent.add(leftOp);
      UnionDesc uDesc = ((UnionOperator) rightOp).getConf();
      uDesc.setNumInputs(uDesc.getNumInputs() + 1);
      return putOpInsertMap(rightOp, unionoutRR);
    }
  }
  // Create a new union operator
  Operator<? extends OperatorDesc> unionforward = OperatorFactory
      .getAndMakeChild(new UnionDesc(), new RowSchema(unionoutRR
      .getColumnInfos()));
  // set union operator as child of each of leftOp and rightOp
  List<Operator<? extends OperatorDesc>> child =
      new ArrayList<Operator<? extends OperatorDesc>>();
  child.add(unionforward);
  rightOp.setChildOperators(child);
  child = new ArrayList<Operator<? extends OperatorDesc>>();
  child.add(unionforward);
  leftOp.setChildOperators(child);
  List<Operator<? extends OperatorDesc>> parent =
      new ArrayList<Operator<? extends OperatorDesc>>();
  parent.add(leftOp);
  parent.add(rightOp);
  unionforward.setParentOperators(parent);
  // create operator info list to return
  return putOpInsertMap(unionforward, unionoutRR);
}
/**
 * Generates a select operator which can go between the original input operator and the union
 * operator. This select casts columns to match the type of the associated column in the union,
 * other columns pass through unchanged. The new operator's only parent is the original input
 * operator to the union, and it's only child is the union. If the input does not need to be
 * cast, the original operator is returned, and no new select operator is added.
 *
 * @param origInputOp
 *          The original input operator to the union.
 * @param origInputFieldMap
 *          A map from field name to ColumnInfo for the original input operator.
 * @param origInputAlias
 *          The alias associated with the original input operator.
 * @param unionoutRR
 *          The union's output row resolver.
 * @param unionalias
 *          The alias of the union.
 * @return
 * @throws SemanticException
 */
private Operator<? extends OperatorDesc> genInputSelectForUnion(
    Operator<? extends OperatorDesc> origInputOp, Map<String, ColumnInfo> origInputFieldMap,
    String origInputAlias, RowResolver unionoutRR, String unionalias)
    throws SemanticException {
  List<ExprNodeDesc> columns = new ArrayList<ExprNodeDesc>();
  boolean needsCast = false;
  // build one column expression per union output field, casting where the
  // input's type differs from the union's common type
  // NOTE(review): assumes origInputFieldMap contains every union field - the
  // schema check in genUnionPlan guarantees this for its callers.
  for (Map.Entry<String, ColumnInfo> unionEntry : unionoutRR.getFieldMap(unionalias).entrySet()) {
    String field = unionEntry.getKey();
    ColumnInfo lInfo = origInputFieldMap.get(field);
    ExprNodeDesc column = new ExprNodeColumnDesc(lInfo.getType(), lInfo.getInternalName(),
        lInfo.getTabAlias(), lInfo.getIsVirtualCol(), lInfo.isSkewedCol());
    if (!lInfo.getType().equals(unionEntry.getValue().getType())) {
      needsCast = true;
      column = ParseUtils.createConversionCast(
          column, (PrimitiveTypeInfo) unionEntry.getValue().getType());
    }
    columns.add(column);
  }
  // If none of the columns need to be cast there's no need for an additional select operator
  if (!needsCast) {
    return origInputOp;
  }
  // build the select's row resolver and column-expression map with fresh
  // positional internal names
  RowResolver rowResolver = new RowResolver();
  Map<String, ExprNodeDesc> columnExprMap = new HashMap<String, ExprNodeDesc>();
  List<String> colName = new ArrayList<String>();
  for (int i = 0; i < columns.size(); i++) {
    String name = getColumnInternalName(i);
    ColumnInfo col = new ColumnInfo(name, columns.get(i)
        .getTypeInfo(), "", false);
    rowResolver.put(origInputAlias, name, col);
    colName.add(name);
    columnExprMap.put(name, columns.get(i));
  }
  Operator<SelectDesc> newInputOp = OperatorFactory.getAndMakeChild(
      new SelectDesc(columns, colName), new RowSchema(rowResolver.getColumnInfos()),
      columnExprMap, origInputOp);
  return putOpInsertMap(newInputOp, rowResolver);
}
/**
 * Generates the sampling predicate from the TABLESAMPLE clause information.
 * This function uses the bucket column list to decide the expression inputs
 * to the predicate hash function in case useBucketCols is set to true,
 * otherwise the expression list stored in the TableSample is used. The bucket
 * columns of the table are used to generate this predicate in case no
 * expressions are provided on the TABLESAMPLE clause and the table has
 * clustering columns defined in it's metadata. The predicate created has the
 * following structure:
 *
 * ((hash(expressions) & Integer.MAX_VALUE) % denominator) == numerator
 *
 * @param ts
 *          TABLESAMPLE clause information
 * @param bucketCols
 *          The clustering columns of the table
 * @param useBucketCols
 *          Flag to indicate whether the bucketCols should be used as input to
 *          the hash function
 * @param alias
 *          The alias used for the table in the row resolver
 * @param rwsch
 *          The row resolver used to resolve column references
 * @param qbm
 *          The metadata information for the query block which is used to
 *          resolve unaliased columns
 * @param planExpr
 *          The plan tree for the expression. If the user specified this, the
 *          parse expressions are not used
 * @return exprNodeDesc
 * @exception SemanticException
 */
private ExprNodeDesc genSamplePredicate(TableSample ts,
    List<String> bucketCols, boolean useBucketCols, String alias,
    RowResolver rwsch, QBMetaData qbm, ExprNodeDesc planExpr)
    throws SemanticException {
  // numerator is 0-based internally: (num - 1)
  ExprNodeDesc numeratorExpr = new ExprNodeConstantDesc(
      TypeInfoFactory.intTypeInfo, Integer.valueOf(ts.getNumerator() - 1));
  ExprNodeDesc denominatorExpr = new ExprNodeConstantDesc(
      TypeInfoFactory.intTypeInfo, Integer.valueOf(ts.getDenominator()));
  // Integer.MAX_VALUE mask makes the hash non-negative before the modulo
  ExprNodeDesc intMaxExpr = new ExprNodeConstantDesc(
      TypeInfoFactory.intTypeInfo, Integer.valueOf(Integer.MAX_VALUE));
  // hash-function inputs, in priority order: explicit plan expression,
  // then bucket columns, then the TABLESAMPLE expressions
  ArrayList<ExprNodeDesc> args = new ArrayList<ExprNodeDesc>();
  if (planExpr != null) {
    args.add(planExpr);
  } else if (useBucketCols) {
    for (String col : bucketCols) {
      ColumnInfo ci = rwsch.get(alias, col);
      // TODO: change type to the one in the table schema
      args.add(new ExprNodeColumnDesc(ci.getType(), ci.getInternalName(), ci
          .getTabAlias(), ci.getIsVirtualCol()));
    }
  } else {
    for (ASTNode expr : ts.getExprs()) {
      args.add(genExprNodeDesc(expr, rwsch));
    }
  }
  // assemble ((hash(args) & MAX_VALUE) % denominator) == (numerator - 1)
  ExprNodeDesc equalsExpr = null;
  {
    ExprNodeDesc hashfnExpr = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.intTypeInfo, new GenericUDFHash(), args);
    assert (hashfnExpr != null);
    LOG.info("hashfnExpr = " + hashfnExpr);
    ExprNodeDesc andExpr = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("&", hashfnExpr, intMaxExpr);
    assert (andExpr != null);
    LOG.info("andExpr = " + andExpr);
    ExprNodeDesc modExpr = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("%", andExpr, denominatorExpr);
    assert (modExpr != null);
    LOG.info("modExpr = " + modExpr);
    LOG.info("numeratorExpr = " + numeratorExpr);
    equalsExpr = TypeCheckProcFactory.DefaultExprProcessor
        .getFuncExprNodeDesc("==", modExpr, numeratorExpr);
    LOG.info("equalsExpr = " + equalsExpr);
    assert (equalsExpr != null);
  }
  return equalsExpr;
}
/**
 * Builds the lower-cased alias id for a table alias: the bare alias for the
 * outer query block, or "qbId:alias" inside a sub-block.
 */
private String getAliasId(String alias, QB qb) {
  String id = qb.getId();
  String qualified = (id == null) ? alias : id + ":" + alias;
  return qualified.toLowerCase();
}
/**
 * Builds the operator (sub)tree for one table reference in the FROM clause:
 * a TableScanOperator plus, when needed, a FilterOperator implementing
 * TABLESAMPLE (or implicit test-mode sampling).
 *
 * @param alias the table alias as written in the query
 * @param qb the enclosing query block (supplies metadata and parse info)
 * @return the top operator for this alias (TS, or the sampling filter on it)
 * @throws SemanticException on invalid sampling specs (non-bucketed table,
 *         numerator larger than denominator)
 */
@SuppressWarnings("nls")
private Operator genTablePlan(String alias, QB qb) throws SemanticException {

  String alias_id = getAliasId(alias, qb);
  Table tab = qb.getMetaData().getSrcForAlias(alias);
  RowResolver rwsch;

  // is the table already present
  Operator<? extends OperatorDesc> top = topOps.get(alias_id);
  Operator<? extends OperatorDesc> dummySel = topSelOps.get(alias_id);
  if (dummySel != null) {
    top = dummySel;
  }

  if (top == null) {
    // First time this alias is seen: build its row resolver from the
    // deserializer schema, then partition columns, then virtual columns.
    rwsch = new RowResolver();
    try {
      StructObjectInspector rowObjectInspector = (StructObjectInspector) tab
          .getDeserializer().getObjectInspector();
      List<? extends StructField> fields = rowObjectInspector
          .getAllStructFieldRefs();
      for (int i = 0; i < fields.size(); i++) {
        /**
         * if the column is a skewed column, use ColumnInfo accordingly
         */
        ColumnInfo colInfo = new ColumnInfo(fields.get(i).getFieldName(),
            TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
                .getFieldObjectInspector()), alias, false);
        colInfo.setSkewedCol((isSkewedCol(alias, qb, fields.get(i)
            .getFieldName())) ? true : false);
        rwsch.put(alias, fields.get(i).getFieldName(), colInfo);
      }
    } catch (SerDeException e) {
      throw new RuntimeException(e);
    }
    // Hack!! - refactor once the metadata APIs with types are ready
    // Finally add the partitioning columns
    for (FieldSchema part_col : tab.getPartCols()) {
      LOG.trace("Adding partition col: " + part_col);
      rwsch.put(alias, part_col.getName(), new ColumnInfo(part_col.getName(),
          TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true));
    }

    // put all virtual columns in RowResolver.
    Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
    // materialize into a list so it can also be handed to TableScanDesc
    List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
    while (vcs.hasNext()) {
      VirtualColumn vc = vcs.next();
      rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(),
          vc.getTypeInfo(), alias, true, vc.getIsHidden()));
      vcList.add(vc);
    }

    // Create the root of the operator tree
    TableScanDesc tsDesc = new TableScanDesc(alias, vcList);
    setupStats(tsDesc, qb.getParseInfo(), tab, alias, rwsch);

    // A split-sample with a row count caps how many rows this scan emits;
    // consume the entry so it is not applied twice.
    SplitSample sample = nameToSplitSample.get(alias_id);
    if (sample != null && sample.getRowCount() != null) {
      tsDesc.setRowLimit(sample.getRowCount());
      nameToSplitSample.remove(alias_id);
    }

    top = putOpInsertMap(OperatorFactory.get(tsDesc,
        new RowSchema(rwsch.getColumnInfos())), rwsch);

    // Add this to the list of top operators - we always start from a table
    // scan
    topOps.put(alias_id, top);

    // Add a mapping from the table scan operator to Table
    topToTable.put((TableScanOperator) top, tab);

    Map<String, String> props = qb.getTabPropsForAlias(alias);
    if (props != null) {
      topToTableProps.put((TableScanOperator) top, props);
    }
  } else {
    // Alias already planned: reuse its resolver and detach stale children
    // so the subtree can be rebuilt below.
    rwsch = opParseCtx.get(top).getRowResolver();
    top.setChildOperators(null);
  }

  // check if this table is sampled and needs more than input pruning
  Operator<? extends OperatorDesc> tableOp = top;
  TableSample ts = qb.getParseInfo().getTabSample(alias);
  if (ts != null) {
    int num = ts.getNumerator();
    int den = ts.getDenominator();
    ArrayList<ASTNode> sampleExprs = ts.getExprs();

    // TODO: Do the type checking of the expressions
    List<String> tabBucketCols = tab.getBucketCols();
    int numBuckets = tab.getNumBuckets();

    // If there are no sample cols and no bucket cols then throw an error
    if (tabBucketCols.size() == 0 && sampleExprs.size() == 0) {
      throw new SemanticException(ErrorMsg.NON_BUCKETED_TABLE.getMsg() + " "
          + tab.getTableName());
    }

    if (num > den) {
      throw new SemanticException(
          ErrorMsg.BUCKETED_NUMERATOR_BIGGER_DENOMINATOR.getMsg() + " "
              + tab.getTableName());
    }

    // check if a predicate is needed
    // predicate is needed if either input pruning is not enough
    // or if input pruning is not possible

    // check if the sample columns are the same as the table bucket columns
    boolean colsEqual = true;
    if ((sampleExprs.size() != tabBucketCols.size())
        && (sampleExprs.size() != 0)) {
      colsEqual = false;
    }

    // Every sample expression must be a plain column reference matching one
    // of the bucket columns (case-insensitively) for colsEqual to hold.
    for (int i = 0; i < sampleExprs.size() && colsEqual; i++) {
      boolean colFound = false;
      for (int j = 0; j < tabBucketCols.size() && !colFound; j++) {
        if (sampleExprs.get(i).getToken().getType() != HiveParser.TOK_TABLE_OR_COL) {
          break;
        }

        if (((ASTNode) sampleExprs.get(i).getChild(0)).getText()
            .equalsIgnoreCase(tabBucketCols.get(j))) {
          colFound = true;
        }
      }
      colsEqual = (colsEqual && colFound);
    }

    // Check if input can be pruned
    ts.setInputPruning((sampleExprs == null || sampleExprs.size() == 0 || colsEqual));

    // check if input pruning is enough
    if ((sampleExprs == null || sampleExprs.size() == 0 || colsEqual)
        && (num == den || (den % numBuckets == 0 || numBuckets % den == 0))) {

      // input pruning is enough; add the filter for the optimizer to use it
      // later
      LOG.info("No need for sample filter");
      ExprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols,
          colsEqual, alias, rwsch, qb.getMetaData(), null);
      tableOp = OperatorFactory.getAndMakeChild(new FilterDesc(
          samplePredicate, true, new sampleDesc(ts.getNumerator(), ts
              .getDenominator(), tabBucketCols, true)),
          new RowSchema(rwsch.getColumnInfos()), top);
    } else {
      // need to add filter
      // create tableOp to be filterDesc and set as child to 'top'
      LOG.info("Need sample filter");
      ExprNodeDesc samplePredicate = genSamplePredicate(ts, tabBucketCols,
          colsEqual, alias, rwsch, qb.getMetaData(), null);
      tableOp = OperatorFactory.getAndMakeChild(new FilterDesc(
          samplePredicate, true),
          new RowSchema(rwsch.getColumnInfos()), top);
    }
  } else {
    // No explicit TABLESAMPLE: in test mode, sample the table implicitly
    // unless it is listed in HIVETESTMODENOSAMPLE.
    boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE);
    if (testMode) {
      String tabName = tab.getTableName();

      // has the user explicitly asked not to sample this table
      String unSampleTblList = conf
          .getVar(HiveConf.ConfVars.HIVETESTMODENOSAMPLE);
      String[] unSampleTbls = unSampleTblList.split(",");
      boolean unsample = false;
      for (String unSampleTbl : unSampleTbls) {
        if (tabName.equalsIgnoreCase(unSampleTbl)) {
          unsample = true;
        }
      }

      if (!unsample) {
        int numBuckets = tab.getNumBuckets();

        // If the input table is bucketed, choose the first bucket
        if (numBuckets > 0) {
          TableSample tsSample = new TableSample(1, numBuckets);
          tsSample.setInputPruning(true);
          qb.getParseInfo().setTabSample(alias, tsSample);
          ExprNodeDesc samplePred = genSamplePredicate(tsSample, tab
              .getBucketCols(), true, alias, rwsch, qb.getMetaData(), null);
          tableOp = OperatorFactory
              .getAndMakeChild(new FilterDesc(samplePred, true,
                  new sampleDesc(tsSample.getNumerator(), tsSample
                      .getDenominator(), tab.getBucketCols(), true)),
                  new RowSchema(rwsch.getColumnInfos()), top);
          LOG.info("No need for sample filter");
        } else {
          // The table is not bucketed, add a dummy filter :: rand()
          int freq = conf.getIntVar(HiveConf.ConfVars.HIVETESTMODESAMPLEFREQ);
          TableSample tsSample = new TableSample(1, freq);
          tsSample.setInputPruning(false);
          qb.getParseInfo().setTabSample(alias, tsSample);
          LOG.info("Need sample filter");
          // fixed seed keeps the rand()-based test sample deterministic
          ExprNodeDesc randFunc = TypeCheckProcFactory.DefaultExprProcessor
              .getFuncExprNodeDesc("rand", new ExprNodeConstantDesc(Integer
                  .valueOf(460476415)));
          ExprNodeDesc samplePred = genSamplePredicate(tsSample, null, false,
              alias, rwsch, qb.getMetaData(), randFunc);
          tableOp = OperatorFactory.getAndMakeChild(new FilterDesc(
              samplePred, true),
              new RowSchema(rwsch.getColumnInfos()), top);
        }
      }
    }
  }

  Operator output = putOpInsertMap(tableOp, rwsch);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Table Plan for " + alias + " " + tableOp.toString());
  }

  return output;
}
/**
 * Returns whether {@code colName} is declared as a skewed column for the
 * given table alias (case-insensitive comparison).
 *
 * @param alias the table alias whose skewed-column list is consulted
 * @param qb the query block supplying the skewed-column metadata
 * @param colName the column name to test
 * @return true if the column matches any declared skewed column
 */
private boolean isSkewedCol(String alias, QB qb, String colName) {
  // Return on the first match instead of scanning the remaining list
  // (the original kept iterating after setting the flag).
  for (String skewedCol : qb.getSkewedColumnNames(alias)) {
    if (skewedCol.equalsIgnoreCase(colName)) {
      return true;
    }
  }
  return false;
}
/**
 * Configures statistics gathering on a table scan. For non-ANALYZE queries
 * stats gathering is simply disabled; for ANALYZE commands this wires up the
 * stats publisher location, stats virtual columns, partition columns, the
 * aggregation key prefix, and the WriteEntity outputs used for locking.
 *
 * @param tsDesc the table scan descriptor being configured
 * @param qbp parse info used to detect ANALYZE and fetch the table spec
 * @param tab the scanned table
 * @param alias the table alias (used when registering virtual columns)
 * @param rwsch row resolver to extend with stats virtual columns
 * @throws SemanticException if a partitioned table is analyzed without a
 *         partition specification
 */
private void setupStats(TableScanDesc tsDesc, QBParseInfo qbp, Table tab, String alias,
    RowResolver rwsch)
    throws SemanticException {

  if (!qbp.isAnalyzeCommand()) {
    tsDesc.setGatherStats(false);
  } else {
    // fs-based stats publishing needs a temp dir to exchange stats files
    if (HiveConf.getVar(conf, HIVESTATSDBCLASS).equalsIgnoreCase(StatDB.fs.name())) {
      String statsTmpLoc = ctx.getExternalTmpPath(tab.getPath().toUri()).toString();
      LOG.info("Set stats collection dir : " + statsTmpLoc);
      conf.set(StatsSetupConst.STATS_TMP_LOC, statsTmpLoc);
    }
    tsDesc.setGatherStats(true);
    tsDesc.setStatsReliable(conf.getBoolVar(HiveConf.ConfVars.HIVE_STATS_RELIABLE));
    tsDesc.setMaxStatsKeyPrefixLength(StatsFactory.getMaxPrefixLength(conf));

    // append additional virtual columns for storing statistics
    Iterator<VirtualColumn> vcs = VirtualColumn.getStatsRegistry(conf).iterator();
    List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
    while (vcs.hasNext()) {
      VirtualColumn vc = vcs.next();
      rwsch.put(alias, vc.getName(), new ColumnInfo(vc.getName(),
          vc.getTypeInfo(), alias, true, vc.getIsHidden()));
      vcList.add(vc);
    }
    tsDesc.addVirtualCols(vcList);

    String tblName = tab.getTableName();
    tableSpec tblSpec = qbp.getTableSpec(alias);
    Map<String, String> partSpec = tblSpec.getPartSpec();

    if (partSpec != null) {
      List<String> cols = new ArrayList<String>();
      cols.addAll(partSpec.keySet());
      tsDesc.setPartColumns(cols);
    }

    // Theoretically the key prefix could be any unique string shared
    // between TableScanOperator (when publishing) and StatsTask (when aggregating).
    // Here we use
    // db_name.table_name + partitionSec
    // as the prefix for easy of read during explain and debugging.
    // Currently, partition spec can only be static partition.
    String k = tblName + Path.SEPARATOR;
    tsDesc.setStatsAggPrefix(tab.getDbName()+"."+k);

    // set up WriteEntity for replication
    outputs.add(new WriteEntity(tab, WriteEntity.WriteType.DDL_SHARED));

    // add WriteEntity for each matching partition
    if (tab.isPartitioned()) {
      if (partSpec == null) {
        throw new SemanticException(ErrorMsg.NEED_PARTITION_SPECIFICATION.getMsg());
      }
      List<Partition> partitions = qbp.getTableSpec().partitions;
      if (partitions != null) {
        for (Partition partn : partitions) {
          // inputs.add(new ReadEntity(partn)); // is this needed at all?
          outputs.add(new WriteEntity(partn, WriteEntity.WriteType.DDL_NO_LOCK));
        }
      }
    }
  }
}
/**
 * Generates the operator plan for a query-block expression: either a plain
 * query block (NULLOP) or a UNION of two sub-expressions.
 */
private Operator genPlan(QBExpr qbexpr) throws SemanticException {
  // Dispatch on the expression opcode rather than chained ifs.
  switch (qbexpr.getOpcode()) {
  case NULLOP:
    return genPlan(qbexpr.getQB());
  case UNION:
    Operator leftOps = genPlan(qbexpr.getQBExpr1());
    Operator rightOps = genPlan(qbexpr.getQBExpr2());
    return genUnionPlan(qbexpr.getAlias(), qbexpr.getQBExpr1().getAlias(),
        leftOps, qbexpr.getQBExpr2().getAlias(), rightOps);
  default:
    return null;
  }
}
/**
 * Generates the complete operator plan for a query block: subqueries and
 * source tables first, then PTF invocations, lateral views, joins (or the
 * single-source case), and finally the query body via genBodyPlan.
 *
 * @param qb the query block to plan; also stored into {@code this.qb}
 * @return the top operator of the generated body plan
 * @throws SemanticException on any planning failure
 */
@SuppressWarnings("nls")
public Operator genPlan(QB qb) throws SemanticException {

  // First generate all the opInfos for the elements in the from clause
  Map<String, Operator> aliasToOpInfo = new HashMap<String, Operator>();

  // Recurse over the subqueries to fill the subquery part of the plan
  for (String alias : qb.getSubqAliases()) {
    QBExpr qbexpr = qb.getSubqForAlias(alias);
    aliasToOpInfo.put(alias, genPlan(qbexpr));
    qbexpr.setAlias(alias);
  }

  // Recurse over all the source tables
  for (String alias : qb.getTabAliases()) {
    Operator op = genTablePlan(alias, qb);
    aliasToOpInfo.put(alias, op);
  }

  // No FROM clause at all: scan a synthetic one-row dummy table instead.
  if (aliasToOpInfo.isEmpty()) {
    qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
    TableScanOperator op = (TableScanOperator) genTablePlan(DUMMY_TABLE, qb);
    op.getConf().setRowLimit(1);
    qb.addAlias(DUMMY_TABLE);
    qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
    aliasToOpInfo.put(DUMMY_TABLE, op);
  }

  Operator srcOpInfo = null;
  Operator lastPTFOp = null;

  if(queryProperties.hasPTF()){
    //After processing subqueries and source tables, process
    // partitioned table functions

    HashMap<ASTNode, PTFInvocationSpec> ptfNodeToSpec = qb.getPTFNodeToSpec();
    if ( ptfNodeToSpec != null ) {
      for(Entry<ASTNode, PTFInvocationSpec> entry : ptfNodeToSpec.entrySet()) {
        ASTNode ast = entry.getKey();
        PTFInvocationSpec spec = entry.getValue();
        String inputAlias = spec.getQueryInputName();
        Operator inOp = aliasToOpInfo.get(inputAlias);
        if ( inOp == null ) {
          throw new SemanticException(generateErrorMessage(ast,
              "Cannot resolve input Operator for PTF invocation"));
        }
        lastPTFOp = genPTFPlan(spec, inOp);
        String ptfAlias = spec.getFunction().getAlias();
        if ( ptfAlias != null ) {
          aliasToOpInfo.put(ptfAlias, lastPTFOp);
        }
      }
    }

  }

  // For all the source tables that have a lateral view, attach the
  // appropriate operators to the TS
  genLateralViewPlans(aliasToOpInfo, qb);


  // process join
  if (qb.getParseInfo().getJoinExpr() != null) {
    ASTNode joinExpr = qb.getParseInfo().getJoinExpr();

    if (joinExpr.getToken().getType() == HiveParser.TOK_UNIQUEJOIN) {
      QBJoinTree joinTree = genUniqueJoinTree(qb, joinExpr, aliasToOpInfo);
      qb.setQbJoinTree(joinTree);
    } else {
      QBJoinTree joinTree = genJoinTree(qb, joinExpr, aliasToOpInfo);
      qb.setQbJoinTree(joinTree);
      /*
       * if there is only one destination in Query try to push where predicates
       * as Join conditions
       */
      Set<String> dests = qb.getParseInfo().getClauseNames();
      if ( dests.size() == 1 ) {
        String dest = dests.iterator().next();
        ASTNode whereClause = qb.getParseInfo().getWhrForClause(dest);
        if ( whereClause != null ) {
          extractJoinCondsFromWhereClause(joinTree, qb, dest,
              (ASTNode) whereClause.getChild(0),
              aliasToOpInfo );
        }
      }

      // CBO sets disableJoinMerge so the optimized shape is preserved
      if (!disableJoinMerge)
        mergeJoinTree(qb);
    }

    // if any filters are present in the join tree, push them on top of the
    // table
    pushJoinFilters(qb, qb.getQbJoinTree(), aliasToOpInfo);
    srcOpInfo = genJoinPlan(qb, aliasToOpInfo);
  } else {
    // Now if there are more than 1 sources then we have a join case
    // later we can extend this to the union all case as well
    srcOpInfo = aliasToOpInfo.values().iterator().next();
    // with ptfs, there maybe more (note for PTFChains:
    // 1 ptf invocation may entail multiple PTF operators)
    srcOpInfo = lastPTFOp != null ? lastPTFOp : srcOpInfo;
  }

  Operator bodyOpInfo = genBodyPlan(qb, srcOpInfo, aliasToOpInfo);

  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Plan for Query Block " + qb.getId());
  }

  this.qb = qb;
  return bodyOpInfo;
}
/**
 * Builds the in-memory descriptor of the single-row dummy table used when a
 * query has no FROM clause; its location points at the scratch-dir dummy
 * file and it reads/writes via the null serde and null input format.
 */
private Table getDummyTable() throws SemanticException {
  Path dummyPath = createDummyFile();
  Table dummyTable = new Table(DUMMY_DATABASE, DUMMY_TABLE);
  dummyTable.getTTable().getSd().setLocation(dummyPath.toString());
  dummyTable.getTTable().getSd().getSerdeInfo()
      .setSerializationLib(NullStructSerDe.class.getName());
  dummyTable.setInputFormatClass(NullRowsInputFormat.class);
  dummyTable.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
  return dummyTable;
}
// Materializes (at most once per scratch dir) a one-byte dummy file so the
// dummy table's location is non-empty and is not dropped by
// CombineHiveInputFormat and similar input-format machinery.
private Path createDummyFile() throws SemanticException {
  Path dummyPath = new Path(ctx.getMRScratchDir(), "dummy_path");
  Path dummyFile = new Path(dummyPath, "dummy_file");
  FSDataOutputStream fout = null;
  try {
    FileSystem fs = dummyFile.getFileSystem(conf);
    if (fs.exists(dummyFile)) {
      // already created by an earlier query using the same scratch dir
      return dummyPath;
    }
    fout = fs.create(dummyFile);
    fout.write(1);
    fout.close();
  } catch (IOException e) {
    throw new SemanticException(e);
  } finally {
    // no-op if already closed above; covers the exception paths
    IOUtils.closeStream(fout);
  }
  return dummyPath;
}
/**
 * Generates the operator DAG needed to implement lateral views and attaches
 * it to the TS operator.
 *
 * @param aliasToOpInfo
 *          A mapping from a table alias to the TS operator. This function
 *          replaces the operator mapping as necessary
 * @param qb the query block whose aliases may carry lateral views
 * @throws SemanticException if a lateral view plan cannot be generated
 */
void genLateralViewPlans(Map<String, Operator> aliasToOpInfo, QB qb)
    throws SemanticException {
  Map<String, ArrayList<ASTNode>> aliasToLateralViews = qb.getParseInfo()
      .getAliasToLateralViews();
  for (Entry<String, Operator> e : aliasToOpInfo.entrySet()) {
    String alias = e.getKey();
    // See if the alias has a lateral view. If so, chain the lateral view
    // operator on
    ArrayList<ASTNode> lateralViews = aliasToLateralViews.get(alias);
    if (lateralViews != null) {
      Operator op = e.getValue();

      // Reuse the list fetched above instead of looking it up again.
      for (ASTNode lateralViewTree : lateralViews) {
        // There are 2 paths from the TS operator (or a previous LVJ operator)
        // to the same LateralViewJoinOperator.
        // TS -> SelectOperator(*) -> LateralViewJoinOperator
        // TS -> SelectOperator (gets cols for UDTF) -> UDTFOperator0
        // -> LateralViewJoinOperator
        //

        op = genLateralViewPlan(qb, op, lateralViewTree);
      }
      e.setValue(op);
    }
  }
}
/**
 * Chains a lateral-view plan onto {@code op} when the given destination
 * clause declares one; otherwise returns {@code op} unchanged.
 */
private Operator genLateralViewPlanForDest(String dest, QB qb, Operator op)
    throws SemanticException {
  ASTNode lvTree = qb.getParseInfo().getDestToLateralView().get(dest);
  return (lvTree == null) ? op : genLateralViewPlan(qb, op, lvTree);
}
/**
 * Builds the lateral-view DAG for one LATERAL VIEW clause on top of
 * {@code op}: an LVForward operator fans out into a select(*) path and a
 * UDTF path, which are re-joined by a LateralViewJoinOperator whose columns
 * get fresh internal names.
 *
 * @param qb the enclosing query block (receives the UDTF aliases)
 * @param op the operator the lateral view is attached to
 * @param lateralViewTree the TOK_LATERAL_VIEW(_OUTER) AST node
 * @return the LateralViewJoinOperator merging both paths
 * @throws SemanticException if the UDTF select plan cannot be generated
 */
private Operator genLateralViewPlan(QB qb, Operator op, ASTNode lateralViewTree)
    throws SemanticException {
  RowResolver lvForwardRR = new RowResolver();
  RowResolver source = opParseCtx.get(op).getRowResolver();
  // Forward every input column unchanged into the LVForward resolver.
  for (ColumnInfo col : source.getColumnInfos()) {
    String[] tabCol = source.reverseLookup(col.getInternalName());
    lvForwardRR.put(tabCol[0], tabCol[1], col);
  }

  Operator lvForward = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new LateralViewForwardDesc(), new RowSchema(lvForwardRR.getColumnInfos()),
      op), lvForwardRR);

  // The order in which the two paths are added is important. The
  // lateral view join operator depends on having the select operator
  // give it the row first.

  // Get the all path by making a select(*).
  RowResolver allPathRR = opParseCtx.get(lvForward).getRowResolver();
  // Operator allPath = op;
  Operator allPath = putOpInsertMap(OperatorFactory.getAndMakeChild(
      new SelectDesc(true), new RowSchema(allPathRR.getColumnInfos()),
      lvForward), allPathRR);
  int allColumns = allPathRR.getColumnInfos().size();
  // Get the UDTF Path
  QB blankQb = new QB(null, null, false);
  Operator udtfPath = genSelectPlan((ASTNode) lateralViewTree
      .getChild(0), blankQb, lvForward,
      lateralViewTree.getType() == HiveParser.TOK_LATERAL_VIEW_OUTER);
  // add udtf aliases to QB
  for (String udtfAlias : blankQb.getAliases()) {
    qb.addAlias(udtfAlias);
  }
  RowResolver udtfPathRR = opParseCtx.get(udtfPath).getRowResolver();

  // Merge the two into the lateral view join
  // The cols of the merged result will be the combination of both the
  // cols of the UDTF path and the cols of the all path. The internal
  // names have to be changed to avoid conflicts

  RowResolver lateralViewRR = new RowResolver();
  ArrayList<String> outputInternalColNames = new ArrayList<String>();

  // For PPD, we need a column to expression map so that during the walk,
  // the processor knows how to transform the internal col names.
  // Following steps are dependant on the fact that we called
  // LVmerge.. in the above order
  Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();

  LVmergeRowResolvers(allPathRR, lateralViewRR, colExprMap, outputInternalColNames);
  LVmergeRowResolvers(udtfPathRR, lateralViewRR, colExprMap, outputInternalColNames);

  Operator lateralViewJoin = putOpInsertMap(OperatorFactory
      .getAndMakeChild(new LateralViewJoinDesc(allColumns, outputInternalColNames),
          new RowSchema(lateralViewRR.getColumnInfos()), allPath,
          udtfPath), lateralViewRR);
  lateralViewJoin.setColumnExprMap(colExprMap);
  return lateralViewJoin;
}
/**
 * Copies every column (and its alias) from {@code source} into {@code dest},
 * renaming each column's internal name to the positional name produced by
 * getColumnInternalName(position).
 *
 * Relies on RowResolver.getColumnInfos() returning columns in the same order
 * as they flow through the operator DAG.
 *
 * @param source resolver whose columns are merged in
 * @param dest resolver receiving the renamed columns
 * @param colExprMap internal-name-to-expression map extended for PPD rewrites
 * @param outputInternalColNames receives the new internal names, in order
 */
private void LVmergeRowResolvers(RowResolver source, RowResolver dest,
    Map<String, ExprNodeDesc> colExprMap, ArrayList<String> outputInternalColNames) {
  for (ColumnInfo col : source.getColumnInfos()) {
    // Next positional internal name; record it for the LVJ descriptor.
    String newName = getColumnInternalName(outputInternalColNames.size());
    outputInternalColNames.add(newName);
    // Re-register the column under its new internal name, keeping type,
    // alias and virtual-column flags intact.
    String[] qualified = source.reverseLookup(col.getInternalName());
    dest.put(qualified[0], qualified[1],
        new ColumnInfo(newName, col.getType(), col.getTabAlias(),
            col.getIsVirtualCol(), col.isHiddenVirtualCol()));
    // Map the new name back to an expression over the old name for PPD.
    colExprMap.put(newName, new ExprNodeColumnDesc(col.getType(),
        col.getInternalName(), col.getTabAlias(), col.getIsVirtualCol()));
  }
}
/**
 * Creates a fresh phase-1 context: destination counter at zero and the
 * default destination name "reduce".
 */
@SuppressWarnings("nls")
public Phase1Ctx initPhase1Ctx() {
  Phase1Ctx phase1Ctx = new Phase1Ctx();
  phase1Ctx.nextNum = 0;
  phase1Ctx.dest = "reduce";
  return phase1Ctx;
}
/**
 * Resets the analyzer state and installs an empty top-level query block,
 * ready for a fresh analysis pass.
 */
@Override
public void init() {
  // clear most members
  reset();
  // start over from an empty top-level query block
  this.qb = new QB(null, null, false);
}
/**
 * Main driver of semantic analysis: handles CREATE TABLE (CTAS) and
 * CREATE/ALTER VIEW preambles, runs phase 1 and metadata resolution,
 * optionally re-plans through the CBO (Optiq), derives the result schema,
 * builds the ParseContext, runs the logical optimizer, and compiles tasks.
 *
 * @param ast the root AST of the statement being analyzed
 * @throws SemanticException on any analysis or planning failure
 */
@Override
@SuppressWarnings("nls")
public void analyzeInternal(ASTNode ast) throws SemanticException {
  ASTNode child = ast;
  this.ast = ast;
  viewsExpanded = new ArrayList<String>();
  ctesExpanded = new ArrayList<String>();

  LOG.info("Starting Semantic Analysis");

  // analyze and process the position alias
  processPositionAlias(ast);

  // analyze create table command
  if (ast.getToken().getType() == HiveParser.TOK_CREATETABLE) {
    // if it is not CTAS, we don't need to go further and just return
    if ((child = analyzeCreateTable(ast, qb)) == null) {
      return;
    }
  } else {
    SessionState.get().setCommandType(HiveOperation.QUERY);
  }

  // analyze create view command
  if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW ||
      ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
    child = analyzeCreateView(ast, qb);
    SessionState.get().setCommandType(HiveOperation.CREATEVIEW);
    if (child == null) {
      return;
    }
    viewSelect = child;
    // prevent view from referencing itself
    viewsExpanded.add(SessionState.get().getCurrentDatabase() + "." + createVwDesc.getViewName());
  }

  // continue analyzing from the child ASTNode.
  Phase1Ctx ctx_1 = initPhase1Ctx();
  if (!doPhase1(child, qb, ctx_1)) {
    // if phase1Result false return
    return;
  }
  LOG.info("Completed phase 1 of Semantic Analysis");

  getMetaData(qb);
  LOG.info("Completed getting MetaData in Semantic Analysis");

  // CBO only applies to plain queries/EXPLAIN (not views) when enabled and
  // the query shape is supported; it also disables join-tree merging so the
  // optimizer-chosen join order is preserved.
  if (runCBO) {
    boolean tokenTypeIsQuery = ast.getToken().getType() == HiveParser.TOK_QUERY
        || ast.getToken().getType() == HiveParser.TOK_EXPLAIN;
    if (!tokenTypeIsQuery || createVwDesc != null
        || !HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)
        || !canHandleQuery()) {
      runCBO = false;
    }
    if (runCBO) {
      disableJoinMerge = true;
    }
  }

  // Save the result schema derived from the sink operator produced
  // by genPlan. This has the correct column names, which clients
  // such as JDBC would prefer instead of the c0, c1 we'll end
  // up with later.
  Operator sinkOp = null;

  if (runCBO) {
    boolean reAnalyzeAST = false;

    try {
      // 1. Gen Optimized AST
      ASTNode newAST = new OptiqBasedPlanner().getOptimizedAST();

      // 2. Regen OP plan from optimized AST
      init();
      ctx_1 = initPhase1Ctx();
      if (!doPhase1(newAST, qb, ctx_1)) {
        throw new RuntimeException(
            "Couldn't do phase1 on CBO optimized query plan");
      }
      getMetaData(qb);
      disableJoinMerge = true;
      sinkOp = genPlan(qb);
      /*
       * Use non CBO Result Set Schema so as to preserve user specified names.
       * Hive seems to have bugs with OB/LIMIT in sub queries. // 3. Reset
       * result set schema resultSchema =
       * convertRowSchemaToResultSetSchema(opParseCtx.get(sinkOp)
       * .getRowResolver(), true);
       */
    } catch (Exception e) {
      LOG.warn("CBO failed, skipping CBO. ", e);
      throw new RuntimeException(e);
    } finally {
      // always restore the non-CBO flags for subsequent statements
      runCBO = false;
      disableJoinMerge = false;
      if (reAnalyzeAST) {
        init();
        analyzeInternal(ast);
        return;
      }
    }
  } else {
    sinkOp = genPlan(qb);
  }

  // Views must not carry table-alias-qualified names and need unique columns.
  if (createVwDesc != null)
    resultSchema = convertRowSchemaToViewSchema(opParseCtx.get(sinkOp).getRowResolver());
  else
    resultSchema = convertRowSchemaToResultSetSchema(opParseCtx.get(sinkOp).getRowResolver(),
        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES));

  ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner,
      opToPartList, topOps, topSelOps, opParseCtx, joinContext, smbMapJoinContext,
      topToTable, topToTableProps, fsopToTable,
      loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
      listMapJoinOpsNoReducer, groupOpToInputTables, prunedPartitions,
      opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
      opToPartToSkewedPruner, viewAliasToInput,
      reduceSinkOperatorsAddedByEnforceBucketingSorting, queryProperties);

  if (createVwDesc != null) {
    saveViewDefinition();

    // validate the create view statement
    // at this point, the createVwDesc gets all the information for semantic check
    validateCreateView(createVwDesc);

    // Since we're only creating a view (not executing it), we
    // don't need to optimize or translate the plan (and in fact, those
    // procedures can interfere with the view creation). So
    // skip the rest of this method.
    ctx.setResDir(null);
    ctx.setResFile(null);

    try {
      PlanUtils.addInputsForView(pCtx);
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
    return;
  }

  // Generate table access stats if required
  if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_TABLEKEYS) == true) {
    TableAccessAnalyzer tableAccessAnalyzer = new TableAccessAnalyzer(pCtx);
    setTableAccessInfo(tableAccessAnalyzer.analyzeTableAccess());
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("Before logical optimization\n" + Operator.toString(pCtx.getTopOps().values()));
  }

  Optimizer optm = new Optimizer();
  optm.setPctx(pCtx);
  optm.initialize(conf);
  pCtx = optm.optimize();

  // captured before task compilation may replace/clear it
  FetchTask origFetchTask = pCtx.getFetchTask();

  if (LOG.isDebugEnabled()) {
    LOG.debug("After logical optimization\n" + Operator.toString(pCtx.getTopOps().values()));
  }

  // Generate column access stats if required - wait until column pruning takes place
  // during optimization
  if (HiveConf.getBoolVar(this.conf, HiveConf.ConfVars.HIVE_STATS_COLLECT_SCANCOLS) == true) {
    ColumnAccessAnalyzer columnAccessAnalyzer = new ColumnAccessAnalyzer(pCtx);
    setColumnAccessInfo(columnAccessAnalyzer.analyzeColumnAccess());
  }

  if (!ctx.getExplainLogical()) {
    // At this point we have the complete operator tree
    // from which we want to create the map-reduce plan
    TaskCompiler compiler = TaskCompilerFactory.getCompiler(conf, pCtx);
    compiler.init(conf, console, db);
    compiler.compile(pCtx, rootTasks, inputs, outputs);
    fetchTask = pCtx.getFetchTask();
  }

  LOG.info("Completed plan generation");

  if (!ctx.getExplain()) {
    // if desired check we're not going over partition scan limits
    enforceScanLimits(pCtx, origFetchTask);
  }

  return;
}
/**
 * Enforces the per-query partition-scan limit: fails the query when a fetch
 * task or any table scan would read more partitions than the configured
 * maximum (limit of -1 disables the check).
 *
 * @param pCtx parse context providing the per-scan pruned partition lists
 * @param fTask the fetch task, if the query will be served without a job
 * @throws SemanticException with PARTITION_SCAN_LIMIT_EXCEEDED on violation
 */
private void enforceScanLimits(ParseContext pCtx, FetchTask fTask)
    throws SemanticException {
  int scanLimit = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVELIMITTABLESCANPARTITION);

  if (scanLimit > -1) {
    // a scan limit on the number of partitions has been set by the user
    if (fTask != null) {
      // having a fetch task at this point means that we're not going to
      // launch a job on the cluster
      if (!fTask.getWork().isNotPartitioned() && fTask.getWork().getLimit() == -1
          && scanLimit < fTask.getWork().getPartDir().size()) {
        throw new SemanticException(ErrorMsg.PARTITION_SCAN_LIMIT_EXCEEDED, ""
            + fTask.getWork().getPartDir().size(), ""
            + fTask.getWork().getTblDesc().getTableName(), "" + scanLimit);
      }
    } else {
      // At this point we've run the partition pruner for all top ops. Let's
      // check whether any of them break the limit
      for (Operator<?> topOp : topOps.values()) {
        if (topOp instanceof TableScanOperator) {
          // metadata-only scans read no partition data; exempt them
          if (((TableScanDesc)topOp.getConf()).getIsMetadataOnly()) {
            continue;
          }
          PrunedPartitionList parts = pCtx.getOpToPartList().get((TableScanOperator) topOp);
          if (parts.getPartitions().size() > scanLimit) {
            throw new SemanticException(ErrorMsg.PARTITION_SCAN_LIMIT_EXCEEDED, ""
                + parts.getPartitions().size(), "" + parts.getSourceTable().getTableName(), ""
                + scanLimit);
          }
        }
      }
    }
  }
}
/**
 * Returns the result schema derived during {@link #analyzeInternal}; these
 * are the user-visible column names preferred by clients such as JDBC.
 */
@Override
public List<FieldSchema> getResultSchema() {
  return resultSchema;
}
/**
 * Finalizes the CREATE VIEW descriptor: records the original and expanded
 * view SQL text, reconciles the user-imposed column list with the derived
 * schema, and moves trailing PARTITIONED ON columns out of the view schema
 * into the partition-key descriptor.
 *
 * @throws SemanticException on column-count mismatch, duplicate names, or
 *         partition columns that don't match the trailing output columns
 */
private void saveViewDefinition() throws SemanticException {

  // Make a copy of the statement's result schema, since we may
  // modify it below as part of imposing view column names.
  List<FieldSchema> derivedSchema =
      new ArrayList<FieldSchema>(resultSchema);
  ParseUtils.validateColumnNameUniqueness(derivedSchema);

  List<FieldSchema> imposedSchema = createVwDesc.getSchema();
  if (imposedSchema != null) {
    int explicitColCount = imposedSchema.size();
    int derivedColCount = derivedSchema.size();
    if (explicitColCount != derivedColCount) {
      throw new SemanticException(generateErrorMessage(
          viewSelect,
          ErrorMsg.VIEW_COL_MISMATCH.getMsg()));
    }
  }

  // Preserve the original view definition as specified by the user.
  String originalText = ctx.getTokenRewriteStream().toString(
      viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());
  createVwDesc.setViewOriginalText(originalText);

  // Now expand the view definition with extras such as explicit column
  // references; this expanded form is what we'll re-parse when the view is
  // referenced later.
  unparseTranslator.applyTranslations(ctx.getTokenRewriteStream());
  String expandedText = ctx.getTokenRewriteStream().toString(
      viewSelect.getTokenStartIndex(), viewSelect.getTokenStopIndex());

  if (imposedSchema != null) {
    // Merge the names from the imposed schema into the types
    // from the derived schema, by wrapping the expanded text in an outer
    // SELECT that renames each column with AS.
    StringBuilder sb = new StringBuilder();
    sb.append("SELECT ");
    int n = derivedSchema.size();
    for (int i = 0; i < n; ++i) {
      if (i > 0) {
        sb.append(", ");
      }
      FieldSchema fieldSchema = derivedSchema.get(i);
      // Modify a copy, not the original
      fieldSchema = new FieldSchema(fieldSchema);
      derivedSchema.set(i, fieldSchema);
      sb.append(HiveUtils.unparseIdentifier(fieldSchema.getName(), conf));
      sb.append(" AS ");
      String imposedName = imposedSchema.get(i).getName();
      sb.append(HiveUtils.unparseIdentifier(imposedName, conf));
      fieldSchema.setName(imposedName);
      // We don't currently allow imposition of a type
      fieldSchema.setComment(imposedSchema.get(i).getComment());
    }
    sb.append(" FROM (");
    sb.append(expandedText);
    sb.append(") ");
    sb.append(HiveUtils.unparseIdentifier(createVwDesc.getViewName(), conf));
    expandedText = sb.toString();
  }

  if (createVwDesc.getPartColNames() != null) {
    // Make sure all partitioning columns referenced actually
    // exist and are in the correct order at the end
    // of the list of columns produced by the view. Also move the field
    // schema descriptors from derivedSchema to the partitioning key
    // descriptor.
    List<String> partColNames = createVwDesc.getPartColNames();
    if (partColNames.size() > derivedSchema.size()) {
      throw new SemanticException(
          ErrorMsg.VIEW_PARTITION_MISMATCH.getMsg());
    }

    // Get the partition columns from the end of derivedSchema.
    List<FieldSchema> partitionColumns = derivedSchema.subList(
        derivedSchema.size() - partColNames.size(),
        derivedSchema.size());

    // Verify that the names match the PARTITIONED ON clause.
    Iterator<String> colNameIter = partColNames.iterator();
    Iterator<FieldSchema> schemaIter = partitionColumns.iterator();
    while (colNameIter.hasNext()) {
      String colName = colNameIter.next();
      FieldSchema fieldSchema = schemaIter.next();
      if (!fieldSchema.getName().equals(colName)) {
        throw new SemanticException(
            ErrorMsg.VIEW_PARTITION_MISMATCH.getMsg());
      }
    }

    // Boundary case: require at least one non-partitioned column
    // for consistency with tables.
    if (partColNames.size() == derivedSchema.size()) {
      throw new SemanticException(
          ErrorMsg.VIEW_PARTITION_TOTAL.getMsg());
    }

    // Now make a copy.
    createVwDesc.setPartCols(
        new ArrayList<FieldSchema>(partitionColumns));

    // Finally, remove the partition columns from the end of derivedSchema.
    // (Clearing the subList writes through to the underlying
    // derivedSchema ArrayList.)
    partitionColumns.clear();
  }

  createVwDesc.setSchema(derivedSchema);
  createVwDesc.setViewExpandedText(expandedText);
}
/**
 * Derives the schema stored with a view: column names are never qualified
 * with table aliases, and duplicates are rejected.
 */
private List<FieldSchema> convertRowSchemaToViewSchema(RowResolver rr) throws SemanticException {
  List<FieldSchema> viewSchema = convertRowSchemaToResultSetSchema(rr, false);
  ParseUtils.validateColumnNameUniqueness(viewSchema);
  return viewSchema;
}
/**
 * Converts a row resolver into a client-facing result-set schema, skipping
 * hidden virtual columns.
 *
 * @param rr the row resolver to convert
 * @param useTabAliasIfAvailable when true, prefix each column with its
 *        table alias ("alias.col") whenever the alias is known
 * @return the field schemas in resolver order (no column comments)
 */
private List<FieldSchema> convertRowSchemaToResultSetSchema(RowResolver rr,
    boolean useTabAliasIfAvailable) {
  List<FieldSchema> fieldSchemas = new ArrayList<FieldSchema>();
  for (ColumnInfo colInfo : rr.getColumnInfos()) {
    if (colInfo.isHiddenVirtualCol()) {
      continue;
    }
    // Declare per-iteration locals inside the loop (the original hoisted
    // them to method scope for no reason).
    String[] qualifiedColName = rr.reverseLookup(colInfo.getInternalName());
    String colName;
    if (useTabAliasIfAvailable && qualifiedColName[0] != null && !qualifiedColName[0].isEmpty()) {
      colName = qualifiedColName[0] + "." + qualifiedColName[1];
    } else {
      colName = qualifiedColName[1];
    }
    fieldSchemas.add(new FieldSchema(colName, colInfo.getType().getTypeName(), null));
  }
  return fieldSchemas;
}
/**
 * Generates an expression node descriptor for {@code expr}, resolving
 * columns against {@code input} with a default type-checking context.
 */
public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input)
    throws SemanticException {
  // No caller-supplied context: delegate with default settings.
  return genExprNodeDesc(expr, input, new TypeCheckCtx(input));
}
/**
 * Generates expression node descriptors for {@code expr} and all of its
 * children, using a default type-checking context.
 */
public Map<ASTNode, ExprNodeDesc> genAllExprNodeDesc(ASTNode expr, RowResolver input)
    throws SemanticException {
  // Delegate with a freshly created default type-checking context.
  return genAllExprNodeDesc(expr, input, new TypeCheckCtx(input));
}
/**
 * Returns the expression node descriptor for {@code expr}. Expressions
 * already evaluated by a previous operator (e.g. Group-By keys) are served
 * from the row resolver's cache; otherwise the descriptor is built
 * recursively — column refs become ExprNodeColumnDesc, constants become
 * ExprNodeConstantDesc, and everything else a function descriptor over
 * recursively built children.
 */
public ExprNodeDesc genExprNodeDesc(ASTNode expr, RowResolver input,
    TypeCheckCtx tcCtx) throws SemanticException {
  // Fast path: the sub-expression was pre-computed upstream.
  ExprNodeDesc cached = getExprNodeDescCached(expr, input);
  if (cached != null) {
    return cached;
  }
  // Slow path: type-check the whole subtree and pick out the root's desc.
  return genAllExprNodeDesc(expr, input, tcCtx).get(expr);
}
/**
 * Looks up the cached descriptor of {@code expr} in the row resolver.
 * Returns {@code null} when the expression has not been evaluated before.
 * When the cached expression has a recorded source AST, a copy translation
 * is registered with the unparse translator.
 */
private ExprNodeDesc getExprNodeDescCached(ASTNode expr, RowResolver input)
    throws SemanticException {
  ColumnInfo colInfo = input.getExpression(expr);
  if (colInfo == null) {
    return null;
  }
  ASTNode source = input.getExpressionSource(expr);
  if (source != null) {
    unparseTranslator.addCopyTranslation(expr, source);
  }
  return new ExprNodeColumnDesc(colInfo.getType(),
      colInfo.getInternalName(), colInfo.getTabAlias(),
      colInfo.getIsVirtualCol(), colInfo.isSkewedCol());
}
/**
 * Generates expression node descriptors for {@code expr} and all of its
 * children, resolving column names to internal names via the row resolver.
 * When the unparse translator is enabled (view creation), real column
 * references are additionally registered for fully-qualified text expansion.
 *
 * @param expr  the expression AST
 * @param input the row resolver used for name resolution
 * @param tcCtx customized type-checking context
 * @return mapping from each AST node to its expression descriptor
 * @throws SemanticException if the expression fails to type-check
 */
@SuppressWarnings("nls")
public Map<ASTNode, ExprNodeDesc> genAllExprNodeDesc(ASTNode expr, RowResolver input,
    TypeCheckCtx tcCtx) throws SemanticException {
  // Create the walker and the rules dispatcher.
  tcCtx.setUnparseTranslator(unparseTranslator);
  Map<ASTNode, ExprNodeDesc> nodeOutputs =
      TypeCheckProcFactory.genExprNode(expr, tcCtx);
  ExprNodeDesc desc = nodeOutputs.get(expr);
  if (desc == null) {
    // Type checking failed; surface the recorded error, or a generic one
    // when the context did not capture a message.
    String errMsg = tcCtx.getError();
    if (errMsg == null) {
      errMsg = "Error in parsing ";
    }
    throw new SemanticException(errMsg);
  }
  if (desc instanceof ExprNodeColumnListDesc) {
    throw new SemanticException("TOK_ALLCOLREF is not supported in current context");
  }
  if (!unparseTranslator.isEnabled()) {
    // Not creating a view, so no need to track view expansions.
    return nodeOutputs;
  }
  // View creation: record a fully-qualified "alias.column" replacement text
  // for every real column reference so the stored view text is unambiguous.
  for (Map.Entry<ASTNode, ExprNodeDesc> entry : nodeOutputs.entrySet()) {
    if (!(entry.getValue() instanceof ExprNodeColumnDesc)) {
      continue;
    }
    ASTNode node = entry.getKey();
    ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) entry.getValue();
    if ((columnDesc.getTabAlias() == null)
        || (columnDesc.getTabAlias().length() == 0)) {
      // These aren't real column refs; instead, they are special
      // internal expressions used in the representation of aggregation.
      continue;
    }
    String[] tmp = input.reverseLookup(columnDesc.getColumn());
    StringBuilder replacementText = new StringBuilder();
    replacementText.append(HiveUtils.unparseIdentifier(tmp[0], conf));
    replacementText.append(".");
    replacementText.append(HiveUtils.unparseIdentifier(tmp[1], conf));
    unparseTranslator.addTranslation(node, replacementText.toString());
  }
  return nodeOutputs;
}
/**
 * Validates the compiled plan: every input and output entity must be in a
 * protect-mode that allows the query to run (not OFFLINE), partition inserts
 * must not conflict with an existing archive, and all root tasks are
 * reworked/validated recursively.
 */
@Override
public void validate() throws SemanticException {
  LOG.debug("validation start");
  // Validate inputs and outputs have right protectmode to execute the query
  for (ReadEntity readEntity : getInputs()) {
    ReadEntity.Type type = readEntity.getType();
    if (type != ReadEntity.Type.TABLE &&
        type != ReadEntity.Type.PARTITION) {
      // In current implementation it will never happen, but we leave it
      // here to make the logic complete.
      continue;
    }
    Table tbl = readEntity.getTable();
    Partition p = readEntity.getPartition();
    // Reads from an offline table (or offline partition) are rejected.
    if (tbl.isOffline()) {
      throw new SemanticException(
          ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
              "Table " + tbl.getTableName()));
    }
    if (type == ReadEntity.Type.PARTITION && p != null && p.isOffline()) {
      throw new SemanticException(
          ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
              "Table " + tbl.getTableName() +
                  " Partition " + p.getName()));
    }
  }
  for (WriteEntity writeEntity : getOutputs()) {
    WriteEntity.Type type = writeEntity.getType();
    if (type == WriteEntity.Type.PARTITION || type == WriteEntity.Type.DUMMYPARTITION) {
      // A partition write must not collide with an existing archive.
      String conflictingArchive;
      try {
        Partition usedp = writeEntity.getPartition();
        Table tbl = usedp.getTable();
        LOG.debug("validated " + usedp.getName());
        LOG.debug(usedp.getTable());
        conflictingArchive = ArchiveUtils
            .conflictingArchiveNameOrNull(db, tbl, usedp.getSpec());
      } catch (HiveException e) {
        throw new SemanticException(e);
      }
      if (conflictingArchive != null) {
        String message = String.format("Insert conflict with existing archive: %s",
            conflictingArchive);
        throw new SemanticException(message);
      }
    }
    if (type != WriteEntity.Type.TABLE &&
        type != WriteEntity.Type.PARTITION) {
      LOG.debug("not validating writeEntity, because entity is neither table nor partition");
      continue;
    }
    Table tbl;
    Partition p;
    if (type == WriteEntity.Type.PARTITION) {
      Partition inputPartition = writeEntity.getPartition();
      // If it is a partition, Partition's metastore is not fetched. We
      // need to fetch it.
      try {
        p = Hive.get().getPartition(
            inputPartition.getTable(), inputPartition.getSpec(), false);
        if (p != null) {
          tbl = p.getTable();
        } else {
          // if p is null, we assume that we insert to a new partition
          tbl = inputPartition.getTable();
        }
      } catch (HiveException e) {
        throw new SemanticException(e);
      }
      if (type == WriteEntity.Type.PARTITION && p != null && p.isOffline()) {
        throw new SemanticException(
            ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
                " Table " + tbl.getTableName() +
                    " Partition " + p.getName()));
      }
    }
    else {
      LOG.debug("Not a partition.");
      tbl = writeEntity.getTable();
    }
    // Writes into an offline table are rejected as well.
    if (tbl.isOffline()) {
      throw new SemanticException(
          ErrorMsg.OFFLINE_TABLE_OR_PARTITION.getMsg(
              "Table " + tbl.getTableName()));
    }
  }
  boolean reworkMapredWork = HiveConf.getBoolVar(this.conf,
      HiveConf.ConfVars.HIVE_REWORK_MAPREDWORK);
  // validate all tasks
  for (Task<? extends Serializable> rootTask : rootTasks) {
    validate(rootTask, reworkMapredWork);
  }
}
/**
 * Reworks the given task (when map-reduce rework is enabled) and recurses
 * into all of its child tasks.
 */
private void validate(Task<? extends Serializable> task, boolean reworkMapredWork)
    throws SemanticException {
  Utilities.reworkMapRedWork(task, reworkMapredWork, conf);
  List<Task<? extends Serializable>> children = task.getChildTasks();
  if (children != null) {
    for (Task<? extends Serializable> child : children) {
      validate(child, reworkMapredWork);
    }
  }
}
/**
 * Returns the row resolver recorded for the given operator during plan
 * generation (looked up in the operator parse-context map).
 */
public RowResolver getRowResolver(Operator opt) {
  return opParseCtx.get(opt).getRowResolver();
}
/**
 * Merges the configured default table properties (ConfVars.NEWTABLEDEFAULTPARA,
 * a comma-separated list of key=value pairs) into the supplied property map.
 * Values already present in the map win over the configured defaults.
 *
 * @param tblProp existing table properties, may be null
 * @return the (possibly newly allocated) property map including defaults
 */
private Map<String, String> addDefaultProperties(Map<String, String> tblProp) {
  Map<String, String> props =
      (tblProp == null) ? new HashMap<String, String>() : tblProp;
  String defaults = HiveConf.getVar(conf, ConfVars.NEWTABLEDEFAULTPARA);
  if (defaults == null || defaults.isEmpty()) {
    return props;
  }
  for (String pair : defaults.split(",")) {
    String[] kv = pair.split("=", 2);
    // Skip malformed entries; keep caller-supplied values untouched.
    if (kv.length == 2 && !props.containsKey(kv[0])) {
      props.put(kv[0], kv[1]);
    }
  }
  return props;
}
/**
 * Analyzes a CREATE TABLE command. For a regular CREATE TABLE or a
 * CREATE TABLE LIKE we build the DDLWork task directly and return null. For
 * CREATE TABLE AS SELECT we record the SerDe / storage-format information in
 * the QB and return the SELECT subtree, so the rest of the semantic analyzer
 * can process the query with respect to that target table.
 *
 * Fixes relative to the previous revision: removed the unused locals
 * {@code hiveConf} and {@code listBucketColValuesMapping}; replaced
 * {@code e.printStackTrace()} with a logged warning; used
 * {@code Integer.parseInt} instead of {@code Integer.valueOf(...).intValue()};
 * collapsed an if/else whose two branches threw the same exception.
 *
 * @param ast the CREATE TABLE AST
 * @param qb  query block; receives the table descriptor in the CTAS case
 * @return the SELECT subtree for CTAS, otherwise null
 */
private ASTNode analyzeCreateTable(ASTNode ast, QB qb)
    throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  String likeTableName = null;
  List<FieldSchema> cols = new ArrayList<FieldSchema>();
  List<FieldSchema> partCols = new ArrayList<FieldSchema>();
  List<String> bucketCols = new ArrayList<String>();
  List<Order> sortCols = new ArrayList<Order>();
  int numBuckets = -1;
  String comment = null;
  String location = null;
  Map<String, String> tblProps = null;
  boolean ifNotExists = false;
  boolean isExt = false;
  ASTNode selectStmt = null;
  final int CREATE_TABLE = 0; // regular CREATE TABLE
  final int CTLT = 1; // CREATE TABLE LIKE ... (CTLT)
  final int CTAS = 2; // CREATE TABLE AS SELECT ... (CTAS)
  int command_type = CREATE_TABLE;
  List<String> skewedColNames = new ArrayList<String>();
  List<List<String>> skewedValues = new ArrayList<List<String>>();
  boolean storedAsDirs = false;
  RowFormatParams rowFormatParams = new RowFormatParams();
  StorageFormat storageFormat = new StorageFormat();
  AnalyzeCreateCommonVars shared = new AnalyzeCreateCommonVars();
  LOG.info("Creating table " + tableName + " position="
      + ast.getCharPositionInLine());
  int numCh = ast.getChildCount();
  /*
   * Check the 1st-level children and do simple semantic checks: 1) CTLT and
   * CTAS should not coexist. 2) CTLT or CTAS should not coexist with a
   * column list (target table schema). 3) CTAS does not support partitioning
   * (for now).
   */
  for (int num = 1; num < numCh; num++) {
    ASTNode child = (ASTNode) ast.getChild(num);
    if (storageFormat.fillStorageFormat(child, shared)) {
      continue;
    }
    switch (child.getToken().getType()) {
    case HiveParser.TOK_IFNOTEXISTS:
      ifNotExists = true;
      break;
    case HiveParser.KW_EXTERNAL:
      isExt = true;
      break;
    case HiveParser.TOK_LIKETABLE:
      if (child.getChildCount() > 0) {
        likeTableName = getUnescapedName((ASTNode) child.getChild(0));
        if (likeTableName != null) {
          if (command_type == CTAS) {
            throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE
                .getMsg());
          }
          if (cols.size() != 0) {
            throw new SemanticException(ErrorMsg.CTLT_COLLST_COEXISTENCE
                .getMsg());
          }
        }
        command_type = CTLT;
      }
      break;
    case HiveParser.TOK_QUERY: // CTAS
      if (command_type == CTLT) {
        throw new SemanticException(ErrorMsg.CTAS_CTLT_COEXISTENCE.getMsg());
      }
      if (cols.size() != 0) {
        throw new SemanticException(ErrorMsg.CTAS_COLLST_COEXISTENCE.getMsg());
      }
      if (partCols.size() != 0 || bucketCols.size() != 0) {
        // TODO: support dynamic partitioning for CTAS. Currently rejected
        // whether or not dynamic partitioning is enabled.
        throw new SemanticException(ErrorMsg.CTAS_PARCOL_COEXISTENCE.getMsg());
      }
      if (isExt) {
        throw new SemanticException(ErrorMsg.CTAS_EXTTBL_COEXISTENCE.getMsg());
      }
      command_type = CTAS;
      selectStmt = child;
      break;
    case HiveParser.TOK_TABCOLLIST:
      cols = getColumns(child);
      break;
    case HiveParser.TOK_TABLECOMMENT:
      comment = unescapeSQLString(child.getChild(0).getText());
      break;
    case HiveParser.TOK_TABLEPARTCOLS:
      partCols = getColumns((ASTNode) child.getChild(0), false);
      break;
    case HiveParser.TOK_TABLEBUCKETS:
      bucketCols = getColumnNames((ASTNode) child.getChild(0));
      if (child.getChildCount() == 2) {
        numBuckets = Integer.parseInt(child.getChild(1).getText());
      } else {
        sortCols = getColumnNamesOrder((ASTNode) child.getChild(1));
        numBuckets = Integer.parseInt(child.getChild(2).getText());
      }
      break;
    case HiveParser.TOK_TABLEROWFORMAT:
      rowFormatParams.analyzeRowFormat(shared, child);
      break;
    case HiveParser.TOK_TABLELOCATION:
      location = unescapeSQLString(child.getChild(0).getText());
      location = EximUtil.relativeToAbsolutePath(conf, location);
      inputs.add(new ReadEntity(new Path(location), FileUtils.isLocalFile(conf, location)));
      break;
    case HiveParser.TOK_TABLEPROPERTIES:
      tblProps = DDLSemanticAnalyzer.getProps((ASTNode) child.getChild(0));
      break;
    case HiveParser.TOK_TABLESERIALIZER:
      child = (ASTNode) child.getChild(0);
      shared.serde = unescapeSQLString(child.getChild(0).getText());
      if (child.getChildCount() == 2) {
        readProps((ASTNode) (child.getChild(1).getChild(0)),
            shared.serdeProps);
      }
      break;
    case HiveParser.TOK_FILEFORMAT_GENERIC:
      handleGenericFileFormat(child);
      break;
    case HiveParser.TOK_TABLESKEWED:
      // Skewed-table (list bucketing) DDL: column names, skewed values and
      // the "stored as directories" flag.
      skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, child);
      analyzeDDLSkewedValues(skewedValues, child);
      storedAsDirs = analyzeStoredAdDirs(child);
      break;
    default:
      throw new AssertionError("Unknown token: " + child.getToken());
    }
  }
  storageFormat.fillDefaultStorageFormat(shared);
  // A CTAS target cannot use a non-native storage handler.
  if ((command_type == CTAS) && (storageFormat.storageHandler != null)) {
    throw new SemanticException(ErrorMsg.CREATE_NON_NATIVE_AS.getMsg());
  }
  // IF NOT EXISTS: silently succeed when the table is already there.
  if (ifNotExists) {
    try {
      Table table = getTableWithQN(tableName, false);
      if (table != null) { // table exists
        return null;
      }
    } catch (HiveException e) {
      // Lookup failure is non-fatal here (table creation will surface any
      // real problem); log it instead of printing to stderr.
      LOG.warn("Error looking up table " + tableName + " for IF NOT EXISTS", e);
    }
  }
  String[] qualified = Hive.getQualifiedNames(tableName);
  String dbName = qualified.length == 1 ? SessionState.get().getCurrentDatabase() : qualified[0];
  Database database = getDatabase(dbName);
  outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_SHARED));
  // Handle the different flavors of CREATE TABLE.
  CreateTableDesc crtTblDesc = null;
  switch (command_type) {
  case CREATE_TABLE: // REGULAR CREATE TABLE DDL
    tblProps = addDefaultProperties(tblProps);
    crtTblDesc = new CreateTableDesc(tableName, isExt, cols, partCols,
        bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
        rowFormatParams.fieldEscape,
        rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
        comment,
        storageFormat.inputFormat, storageFormat.outputFormat, location, shared.serde,
        storageFormat.storageHandler, shared.serdeProps, tblProps, ifNotExists, skewedColNames,
        skewedValues);
    crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
    crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
    crtTblDesc.validate(conf);
    // outputs is empty, which means this create table happens in the current
    // database.
    SessionState.get().setCommandType(HiveOperation.CREATETABLE);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        crtTblDesc), conf));
    break;
  case CTLT: // create table like <tbl_name>
    tblProps = addDefaultProperties(tblProps);
    CreateTableLikeDesc crtTblLikeDesc = new CreateTableLikeDesc(tableName, isExt,
        storageFormat.inputFormat, storageFormat.outputFormat, location,
        shared.serde, shared.serdeProps, tblProps, ifNotExists, likeTableName);
    SessionState.get().setCommandType(HiveOperation.CREATETABLE);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
        crtTblLikeDesc), conf));
    break;
  case CTAS: // create table as select
    // Verify that the target table does not already exist.
    try {
      Table dumpTable = db.newTable(tableName);
      if (null != db.getTable(dumpTable.getDbName(), dumpTable.getTableName(), false)) {
        throw new SemanticException(ErrorMsg.TABLE_ALREADY_EXISTS.getMsg(tableName));
      }
    } catch (HiveException e) {
      throw new SemanticException(e);
    }
    tblProps = addDefaultProperties(tblProps);
    crtTblDesc = new CreateTableDesc(dbName, tableName, isExt, cols, partCols,
        bucketCols, sortCols, numBuckets, rowFormatParams.fieldDelim,
        rowFormatParams.fieldEscape,
        rowFormatParams.collItemDelim, rowFormatParams.mapKeyDelim, rowFormatParams.lineDelim,
        comment, storageFormat.inputFormat,
        storageFormat.outputFormat, location, shared.serde, storageFormat.storageHandler,
        shared.serdeProps,
        tblProps, ifNotExists, skewedColNames, skewedValues);
    crtTblDesc.setStoredAsSubDirectories(storedAsDirs);
    crtTblDesc.setNullFormat(rowFormatParams.nullFormat);
    qb.setTableDesc(crtTblDesc);
    SessionState.get().setCommandType(HiveOperation.CREATETABLE_AS_SELECT);
    return selectStmt;
  default:
    throw new SemanticException("Unrecognized command.");
  }
  return null;
}
/**
 * Analyzes a CREATE VIEW / OR REPLACE VIEW / ALTER VIEW AS command: collects
 * the view schema, comment, properties and partition columns, builds the
 * CreateViewDesc and its DDL task, and returns the defining SELECT subtree.
 */
private ASTNode analyzeCreateView(ASTNode ast, QB qb)
    throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  List<FieldSchema> cols = null;
  boolean ifNotExists = false;
  boolean orReplace = false;
  boolean isAlterViewAs = false;
  String comment = null;
  ASTNode selectStmt = null;
  Map<String, String> tblProps = null;
  List<String> partColNames = null;
  LOG.info("Creating view " + tableName + " position="
      + ast.getCharPositionInLine());
  for (int i = 1; i < ast.getChildCount(); i++) {
    ASTNode node = (ASTNode) ast.getChild(i);
    switch (node.getToken().getType()) {
    case HiveParser.TOK_IFNOTEXISTS:
      ifNotExists = true;
      break;
    case HiveParser.TOK_ORREPLACE:
      orReplace = true;
      break;
    case HiveParser.TOK_QUERY:
      selectStmt = node;
      break;
    case HiveParser.TOK_TABCOLNAME:
      cols = getColumns(node);
      break;
    case HiveParser.TOK_TABLECOMMENT:
      comment = unescapeSQLString(node.getChild(0).getText());
      break;
    case HiveParser.TOK_TABLEPROPERTIES:
      tblProps = DDLSemanticAnalyzer.getProps((ASTNode) node.getChild(0));
      break;
    case HiveParser.TOK_VIEWPARTCOLS:
      partColNames = getColumnNames((ASTNode) node.getChild(0));
      break;
    default:
      assert false;
    }
  }
  if (ifNotExists && orReplace) {
    throw new SemanticException("Can't combine IF NOT EXISTS and OR REPLACE.");
  }
  // ALTER VIEW AS SELECT behaves like an implicit OR REPLACE.
  if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_AS) {
    isAlterViewAs = true;
    orReplace = true;
  }
  createVwDesc = new CreateViewDesc(
      tableName, cols, comment, tblProps, partColNames,
      ifNotExists, orReplace, isAlterViewAs);
  // Enable unparse translation so the stored view text is fully qualified.
  unparseTranslator.enable();
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
      createVwDesc), conf));
  return selectStmt;
}
/**
 * Validates a CREATE VIEW, OR REPLACE VIEW, or ALTER VIEW AS SELECT
 * statement against the existing metastore state:
 * - ALTER VIEW AS SELECT requires the view to already exist;
 * - OR REPLACE must not target an existing table that is not a view;
 * - a view whose partitions exist cannot be replaced with a different
 *   partitioning.
 *
 * NOTE(review): SemanticException extends HiveException, so the
 * SemanticExceptions thrown inside these try blocks are re-caught by the
 * enclosing HiveException catch clauses and re-wrapped (the message is kept,
 * the original exception identity may not be) -- confirm this is intentional
 * before restructuring.
 */
private void validateCreateView(CreateViewDesc createVwDesc)
    throws SemanticException {
  try {
    Table oldView = getTableWithQN(createVwDesc.getViewName(), false);
    // ALTER VIEW AS SELECT requires the view must exist
    if (createVwDesc.getIsAlterViewAs() && oldView == null) {
      String viewNotExistErrorMsg =
          "The following view does not exist: " + createVwDesc.getViewName();
      throw new SemanticException(
          ErrorMsg.ALTER_VIEW_AS_SELECT_NOT_EXIST.getMsg(viewNotExistErrorMsg));
    }
    //replace view
    if (createVwDesc.getOrReplace() && oldView != null) {
      // Existing table is not a view
      if (!oldView.getTableType().equals(TableType.VIRTUAL_VIEW)) {
        String tableNotViewErrorMsg =
            "The following is an existing table, not a view: " +
                createVwDesc.getViewName();
        throw new SemanticException(
            ErrorMsg.EXISTING_TABLE_IS_NOT_VIEW.getMsg(tableNotViewErrorMsg));
      }
      // if old view has partitions, it could not be replaced
      String partitionViewErrorMsg =
          "The following view has partition, it could not be replaced: " +
              createVwDesc.getViewName();
      try {
        // Replacement is rejected when the new partitioning differs from the
        // old one AND the old view actually has materialized partitions.
        if ((createVwDesc.getPartCols() == null ||
            createVwDesc.getPartCols().isEmpty() ||
            !createVwDesc.getPartCols().equals(oldView.getPartCols())) &&
            !oldView.getPartCols().isEmpty() &&
            !db.getPartitions(oldView).isEmpty()) {
          throw new SemanticException(
              ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg));
        }
      } catch (HiveException e) {
        throw new SemanticException(
            ErrorMsg.REPLACE_VIEW_WITH_PARTITION.getMsg(partitionViewErrorMsg));
      }
    }
  } catch (HiveException e) {
    throw new SemanticException(e.getMessage());
  }
}
/**
 * Replaces positional aliases (1-based select-list indexes) in GROUP BY and
 * ORDER BY clauses with the referenced select expressions, recursing into
 * all children of the tree. No-op unless
 * hive.groupby.orderby.position.alias is enabled.
 */
private void processPositionAlias(ASTNode ast) throws SemanticException {
  if (HiveConf.getBoolVar(conf,
      HiveConf.ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS) == false) {
    return;
  }
  if (ast.getChildCount() == 0) {
    return;
  }
  boolean isAllCol;
  ASTNode selectNode = null;
  ASTNode groupbyNode = null;
  ASTNode orderbyNode = null;
  // get node type
  int child_count = ast.getChildCount();
  for (int child_pos = 0; child_pos < child_count; ++child_pos) {
    ASTNode node = (ASTNode) ast.getChild(child_pos);
    int type = node.getToken().getType();
    if (type == HiveParser.TOK_SELECT) {
      selectNode = node;
    } else if (type == HiveParser.TOK_GROUPBY) {
      groupbyNode = node;
    } else if (type == HiveParser.TOK_ORDERBY) {
      orderbyNode = node;
    }
  }
  if (selectNode != null) {
    int selectExpCnt = selectNode.getChildCount();
    // replace each of the position alias in GROUPBY with the actual column name
    if (groupbyNode != null) {
      for (int child_pos = 0; child_pos < groupbyNode.getChildCount(); ++child_pos) {
        ASTNode node = (ASTNode) groupbyNode.getChild(child_pos);
        if (node.getToken().getType() == HiveParser.Number) {
          int pos = Integer.parseInt(node.getText());
          if (pos > 0 && pos <= selectExpCnt) {
            // Splice the referenced select expression in place of the number.
            groupbyNode.setChild(child_pos,
                selectNode.getChild(pos - 1).getChild(0));
          } else {
            throw new SemanticException(
                ErrorMsg.INVALID_POSITION_ALIAS_IN_GROUPBY.getMsg(
                    "Position alias: " + pos + " does not exist\n" +
                        "The Select List is indexed from 1 to " + selectExpCnt));
          }
        }
      }
    }
    // replace each of the position alias in ORDERBY with the actual column name
    if (orderbyNode != null) {
      isAllCol = false;
      // A '*' in the select list makes positional ORDER BY ambiguous.
      for (int child_pos = 0; child_pos < selectNode.getChildCount(); ++child_pos) {
        ASTNode node = (ASTNode) selectNode.getChild(child_pos).getChild(0);
        if (node.getToken().getType() == HiveParser.TOK_ALLCOLREF) {
          isAllCol = true;
        }
      }
      for (int child_pos = 0; child_pos < orderbyNode.getChildCount(); ++child_pos) {
        ASTNode colNode = (ASTNode) orderbyNode.getChild(child_pos);
        ASTNode node = (ASTNode) colNode.getChild(0);
        if (node.getToken().getType() == HiveParser.Number) {
          if (!isAllCol) {
            int pos = Integer.parseInt(node.getText());
            if (pos > 0 && pos <= selectExpCnt) {
              colNode.setChild(0, selectNode.getChild(pos - 1).getChild(0));
            } else {
              throw new SemanticException(
                  ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg(
                      "Position alias: " + pos + " does not exist\n" +
                          "The Select List is indexed from 1 to " + selectExpCnt));
            }
          } else {
            throw new SemanticException(
                ErrorMsg.NO_SUPPORTED_ORDERBY_ALLCOLREF_POS.getMsg());
          }
        }
      }
    }
  }
  // Recursively process through the children ASTNodes
  for (int child_pos = 0; child_pos < child_count; ++child_pos) {
    processPositionAlias((ASTNode) ast.getChild(child_pos));
  }
  return;
}
/**
 * Handles the "analyze ... partialscan" command: detects whether the tree is
 * a partial-scan request and validates it when so. Kept separate from the
 * noscan handling for flexibility.
 */
protected void processPartialScanCommand(ASTNode tree) throws SemanticException {
  checkPartialScan(tree);
  if (partialscan) {
    validateAnalyzePartialscan(tree);
  }
}
/**
 * Handles the "analyze ... noscan" command: detects whether the tree is a
 * noscan request and validates it when so.
 */
protected void processNoScanCommand(ASTNode tree) throws SemanticException {
  checkNoScan(tree);
  if (noscan) {
    validateAnalyzeNoscan(tree);
  }
}
/**
 * Validates an "analyze ... noscan" command: the named table must exist and
 * must be a native table.
 */
private void validateAnalyzeNoscan(ASTNode tree) throws SemanticException {
  // With noscan the command carries the real table name.
  String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
  Table tbl;
  try {
    tbl = db.getTable(tableName);
  } catch (HiveException e) {
    throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
  }
  // noscan gathers stats via HDFS APIs on the NameNode; non-native tables
  // (hbase, cassandra, ...) live in other storage systems, so give the user
  // a clear error instead.
  if (tbl.isNonNative()) {
    throw new SemanticException(
        ErrorMsg.ANALYZE_TABLE_NOSCAN_NON_NATIVE.getMsg(tbl.getTableName()));
  }
}
/**
 * Validates an "analyze ... partialscan" command: the named table must
 * exist, be native, not be external, and automatic stats gathering must be
 * enabled.
 */
private void validateAnalyzePartialscan(ASTNode tree) throws SemanticException {
  // With partialscan the command carries the real table name.
  String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
  Table tbl;
  try {
    tbl = db.getTable(tableName);
  } catch (HiveException e) {
    throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
  }
  // partialscan reads file metadata via HDFS APIs on the NameNode; non-native
  // tables (hbase, cassandra, ...) live in other storage systems, so give the
  // user a clear error instead.
  if (tbl.isNonNative()) {
    throw new SemanticException(
        ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_NON_NATIVE.getMsg(tbl.getTableName()));
  }
  // Partial scan does not support external tables.
  if (tbl.getTableType().equals(TableType.EXTERNAL_TABLE)) {
    throw new SemanticException(
        ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_EXTERNAL_TABLE.getMsg(tbl.getTableName()));
  }
  if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
    throw new SemanticException(ErrorMsg.ANALYZE_TABLE_PARTIALSCAN_AUTOGATHER.getMsg());
  }
}
/**
 * Detects an "analyze ... compute statistics noscan" command and sets the
 * noscan flag accordingly.
 */
private void checkNoScan(ASTNode tree) {
  if (tree.getChildCount() <= 1) {
    return;
  }
  ASTNode tab = (ASTNode) tree.getChild(0);
  if (tab.getToken().getType() != HiveParser.TOK_TAB) {
    return;
  }
  ASTNode tabName = (ASTNode) tab.getChild(0);
  if (tabName.getToken().getType() != HiveParser.TOK_TABNAME) {
    return;
  }
  ASTNode option = (ASTNode) tree.getChild(1);
  if (option.getToken().getType() == HiveParser.KW_NOSCAN) {
    this.noscan = true;
  }
}
/**
 * Detects an "analyze ... compute statistics partialscan" command and sets
 * the partialscan flag accordingly.
 */
private void checkPartialScan(ASTNode tree) {
  if (tree.getChildCount() <= 1) {
    return;
  }
  ASTNode tab = (ASTNode) tree.getChild(0);
  if (tab.getToken().getType() != HiveParser.TOK_TAB) {
    return;
  }
  ASTNode tabName = (ASTNode) tab.getChild(0);
  if (tabName.getToken().getType() != HiveParser.TOK_TABNAME) {
    return;
  }
  ASTNode option = (ASTNode) tree.getChild(1);
  if (option.getToken().getType() == HiveParser.KW_PARTIALSCAN) {
    this.partialscan = true;
  }
}
/**
 * Returns the query block under analysis.
 */
public QB getQB() {
  return this.qb;
}
/**
 * Sets the query block to analyze.
 */
public void setQB(QB qb) {
  this.qb = qb;
}
//--------------------------- PTF handling -----------------------------------
/**
 * Resolves the source of a partitioned-table-function invocation. The source
 * can be a table reference (aliased via processTable), a subquery (aliased
 * via processSubQuery), or a nested PTF invocation (handled recursively by
 * processPTFChain).
 */
private PTFInputSpec processPTFSource(QB qb, ASTNode inputNode) throws SemanticException {
  PTFInputSpec spec;
  switch (inputNode.getType()) {
  case HiveParser.TOK_TABREF: {
    String alias = processTable(qb, inputNode);
    PTFQueryInputSpec tableSpec = new PTFQueryInputSpec();
    tableSpec.setType(PTFQueryInputType.TABLE);
    tableSpec.setSource(alias);
    spec = tableSpec;
    break;
  }
  case HiveParser.TOK_SUBQUERY: {
    String alias = processSubQuery(qb, inputNode);
    PTFQueryInputSpec subQuerySpec = new PTFQueryInputSpec();
    subQuerySpec.setType(PTFQueryInputType.SUBQUERY);
    subQuerySpec.setSource(alias);
    spec = subQuerySpec;
    break;
  }
  case HiveParser.TOK_PTBLFUNCTION:
    spec = processPTFChain(qb, inputNode);
    break;
  default:
    throw new SemanticException(generateErrorMessage(inputNode,
        "Unknown input type to PTF"));
  }
  spec.setAstNode(inputNode);
  return spec;
}
/**
 * Parses a TOK_PTBLFUNCTION subtree of the form
 *   ^(TOK_PTBLFUNCTION name alias? partitionTableFunctionSource
 *     partitioningSpec? arguments*)
 * The source may be a table reference, a subquery, or another PTF invocation
 * (handled recursively via processPTFSource).
 *
 * Fix: the optional-partitioning bounds check previously tested
 * {@code getChildCount() > inputIdx} while reading index {@code inputIdx + 1},
 * silently relying on the tree API returning null for an out-of-range child;
 * the check now guards the index that is actually read (same behavior,
 * explicit intent).
 *
 * @throws SemanticException if the node has fewer than two children
 */
private PartitionedTableFunctionSpec processPTFChain(QB qb, ASTNode ptf)
    throws SemanticException {
  int childCount = ptf.getChildCount();
  if (childCount < 2) {
    throw new SemanticException(generateErrorMessage(ptf,
        "Not enough Children " + childCount));
  }
  PartitionedTableFunctionSpec ptfSpec = new PartitionedTableFunctionSpec();
  ptfSpec.setAstNode(ptf);
  // function name
  ASTNode nameNode = (ASTNode) ptf.getChild(0);
  ptfSpec.setName(nameNode.getText());
  int inputIdx = 1;
  // optional alias: present when the second child is a bare identifier
  ASTNode secondChild = (ASTNode) ptf.getChild(1);
  if (secondChild.getType() == HiveParser.Identifier) {
    ptfSpec.setAlias(secondChild.getText());
    inputIdx++;
  }
  // input source
  ASTNode inputNode = (ASTNode) ptf.getChild(inputIdx);
  ptfSpec.setInput(processPTFSource(qb, inputNode));
  int argStartIdx = inputIdx + 1;
  // optional partitioning spec
  int pSpecIdx = inputIdx + 1;
  ASTNode pSpecNode = childCount > pSpecIdx ? (ASTNode) ptf.getChild(pSpecIdx) : null;
  if (pSpecNode != null && pSpecNode.getType() == HiveParser.TOK_PARTITIONINGSPEC) {
    PartitioningSpec partitioning = processPTFPartitionSpec(pSpecNode);
    ptfSpec.setPartitioning(partitioning);
    argStartIdx++;
  }
  // remaining children are function arguments
  for (int i = argStartIdx; i < childCount; i++) {
    ptfSpec.addArg((ASTNode) ptf.getChild(i));
  }
  return ptfSpec;
}
/**
 * Invoked during FROM-tree processing on encountering a top-level PTF
 * invocation (^(TOK_PTBLFUNCTION name source partitioningSpec? arguments*)):
 * parses the chain, registers the PTF alias (when present) with the query
 * block, and records a PTFInvocationSpec for the node.
 */
private void processPTF(QB qb, ASTNode ptf) throws SemanticException {
  PartitionedTableFunctionSpec ptfSpec = processPTFChain(qb, ptf);
  String alias = ptfSpec.getAlias();
  if (alias != null) {
    qb.addAlias(alias);
  }
  PTFInvocationSpec invocation = new PTFInvocationSpec();
  invocation.setFunction(ptfSpec);
  qb.addPTFNodeToSpec(ptf, invocation);
}
/**
 * Registers every window clause child of {@code node} with the windowing
 * spec of the destination in {@code ctx_1}.
 */
private void handleQueryWindowClauses(QB qb, Phase1Ctx ctx_1, ASTNode node)
    throws SemanticException {
  WindowingSpec spec = qb.getWindowingSpec(ctx_1.dest);
  int clauseCount = node.getChildCount();
  for (int i = 0; i < clauseCount; i++) {
    processQueryWindowClause(spec, (ASTNode) node.getChild(i));
  }
}
/**
 * Builds a PartitionSpec from the children of a distribute-by/cluster-by
 * node; each child becomes one partition expression.
 */
private PartitionSpec processPartitionSpec(ASTNode node) {
  PartitionSpec partSpec = new PartitionSpec();
  int childCount = node.getChildCount();
  for (int i = 0; i < childCount; i++) {
    PartitionExpression partExpr = new PartitionExpression();
    partExpr.setExpression((ASTNode) node.getChild(i));
    partSpec.addExpression(partExpr);
  }
  return partSpec;
}
/**
 * Builds an OrderSpec from a sort-by/order-by node. Each child wraps one
 * expression; its token type selects the sort direction (ASC token means
 * ascending, anything else descending).
 */
private OrderSpec processOrderSpec(ASTNode sortNode) {
  OrderSpec orderSpec = new OrderSpec();
  int childCount = sortNode.getChildCount();
  for (int i = 0; i < childCount; i++) {
    OrderExpression orderExpr = new OrderExpression();
    orderExpr.setExpression((ASTNode) sortNode.getChild(i).getChild(0));
    boolean ascending =
        sortNode.getChild(i).getType() == HiveParser.TOK_TABSORTCOLNAMEASC;
    orderExpr.setOrder(ascending
        ? org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order.ASC
        : org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order.DESC);
    orderSpec.addExpression(orderExpr);
  }
  return orderSpec;
}
/**
 * Builds a PartitioningSpec from a TOK_PARTITIONINGSPEC node. The first
 * child is either a distribute-by/cluster-by (optionally followed by a sort
 * node) or a sort-by/order-by on its own.
 */
private PartitioningSpec processPTFPartitionSpec(ASTNode pSpecNode)
{
  PartitioningSpec partitioning = new PartitioningSpec();
  ASTNode firstChild = (ASTNode) pSpecNode.getChild(0);
  int type = firstChild.getType();
  if (type == HiveParser.TOK_DISTRIBUTEBY || type == HiveParser.TOK_CLUSTERBY) {
    partitioning.setPartSpec(processPartitionSpec(firstChild));
    // An optional second child carries the sort order.
    if (pSpecNode.getChildCount() > 1) {
      ASTNode sortNode = (ASTNode) pSpecNode.getChild(1);
      partitioning.setOrderSpec(processOrderSpec(sortNode));
    }
  } else if (type == HiveParser.TOK_SORTBY || type == HiveParser.TOK_ORDERBY) {
    partitioning.setOrderSpec(processOrderSpec(firstChild));
  }
  return partitioning;
}
/**
 * Builds a WindowFunctionSpec from a function AST node plus an optional
 * window-specification node. DISTINCT is rejected, since windowing does not
 * support count/sum distinct.
 */
private WindowFunctionSpec processWindowFunction(ASTNode node, ASTNode wsNode)
    throws SemanticException {
  WindowFunctionSpec wfSpec = new WindowFunctionSpec();
  int nodeType = node.getType();
  if (nodeType == HiveParser.TOK_FUNCTIONSTAR) {
    wfSpec.setStar(true);
  } else if (nodeType == HiveParser.TOK_FUNCTIONDI) {
    wfSpec.setDistinct(true);
  }
  if (wfSpec.isDistinct()) {
    throw new SemanticException(generateErrorMessage(node,
        "Count/Sum distinct not supported with Windowing"));
  }
  wfSpec.setExpression(node);
  ASTNode nameNode = (ASTNode) node.getChild(0);
  wfSpec.setName(nameNode.getText());
  // Children 1..count-2 are treated as function arguments; the last child is
  // deliberately skipped (presumably the in-tree window spec, which arrives
  // separately as wsNode -- confirm against the grammar).
  for (int i = 1; i < node.getChildCount() - 1; i++) {
    wfSpec.addArg((ASTNode) node.getChild(i));
  }
  if (wsNode != null) {
    wfSpec.setWindowSpec(processWindowSpec(wsNode));
  }
  return wfSpec;
}
/**
 * Returns true if the expression tree contains a call to the LEAD or LAG
 * windowing UDF anywhere in its subtree.
 */
private boolean containsLeadLagUDF(ASTNode expressionTree) {
  if (expressionTree.getToken().getType() == HiveParser.TOK_FUNCTION) {
    assert (expressionTree.getChildCount() != 0);
    if (expressionTree.getChild(0).getType() == HiveParser.Identifier) {
      String fnName =
          unescapeIdentifier(expressionTree.getChild(0).getText()).toLowerCase();
      if (FunctionRegistry.LAG_FUNC_NAME.equals(fnName)
          || FunctionRegistry.LEAD_FUNC_NAME.equals(fnName)) {
        return true;
      }
    }
  }
  // Recurse into children until a lead/lag call is found.
  for (int i = 0; i < expressionTree.getChildCount(); i++) {
    if (containsLeadLagUDF((ASTNode) expressionTree.getChild(i))) {
      return true;
    }
  }
  return false;
}
/**
 * Registers a named window definition from the query's WINDOW clause on the
 * given {@link WindowingSpec}.
 *
 * @param spec the windowing spec being populated
 * @param node AST node whose child 0 is the window name and child 1 the window spec
 * @throws SemanticException if the same window name is defined twice
 */
private void processQueryWindowClause(WindowingSpec spec, ASTNode node)
  throws SemanticException {
  ASTNode nameNode = (ASTNode) node.getChild(0);
  ASTNode wsNode = (ASTNode) node.getChild(1);
  String windowName = nameNode.getText();
  boolean alreadyDefined =
      spec.getWindowSpecs() != null && spec.getWindowSpecs().containsKey(windowName);
  if (alreadyDefined) {
    throw new SemanticException(generateErrorMessage(nameNode,
        "Duplicate definition of window " + windowName +
        " is not allowed"));
  }
  spec.addWindowSpec(windowName, processWindowSpec(wsNode));
}
/**
 * Translates a window specification AST node into a {@link WindowSpec}.
 * The node's children may (in any order) carry: an Identifier naming a source
 * window, a TOK_PARTITIONINGSPEC, and a window frame (TOK_WINDOWRANGE or
 * TOK_WINDOWVALUES). A first pass locates each component; a second pass
 * translates whichever ones are present.
 *
 * @param node the window specification AST node
 * @return the translated WindowSpec
 * @throws SemanticException on an invalid window frame
 */
private WindowSpec processWindowSpec(ASTNode node) throws SemanticException {
  // Removed dead locals (sourceId, partition, order, windowFrame) — they were
  // declared and never used; the index/flag pairs below carry the state.
  boolean hasSrcId = false, hasPartSpec = false, hasWF = false;
  int srcIdIdx = -1, partIdx = -1, wfIdx = -1;
  for(int i=0; i < node.getChildCount(); i++)
  {
    int type = node.getChild(i).getType();
    switch(type)
    {
    case HiveParser.Identifier:
      hasSrcId = true; srcIdIdx = i;
      break;
    case HiveParser.TOK_PARTITIONINGSPEC:
      hasPartSpec = true; partIdx = i;
      break;
    case HiveParser.TOK_WINDOWRANGE:
    case HiveParser.TOK_WINDOWVALUES:
      hasWF = true; wfIdx = i;
      break;
    }
  }
  WindowSpec ws = new WindowSpec();
  if (hasSrcId) {
    ASTNode nameNode = (ASTNode) node.getChild(srcIdIdx);
    ws.setSourceId(nameNode.getText());
  }
  if (hasPartSpec) {
    ASTNode partNode = (ASTNode) node.getChild(partIdx);
    PartitioningSpec partitioning = processPTFPartitionSpec(partNode);
    ws.setPartitioning(partitioning);
  }
  if ( hasWF)
  {
    ASTNode wfNode = (ASTNode) node.getChild(wfIdx);
    WindowFrameSpec wfSpec = processWindowFrame(wfNode);
    ws.setWindowFrame(wfSpec);
  }
  return ws;
}
/**
 * Translates a window frame node (TOK_WINDOWRANGE or TOK_WINDOWVALUES) into a
 * {@link WindowFrameSpec}. A frame always has a start boundary; the
 * "between" form contributes a second child carrying the end boundary.
 *
 * @param node the window frame AST node
 * @throws SemanticException on an invalid boundary
 */
private WindowFrameSpec processWindowFrame(ASTNode node) throws SemanticException {
  int frameType = node.getType();
  BoundarySpec start = processBoundary(frameType, (ASTNode) node.getChild(0));
  BoundarySpec end = node.getChildCount() > 1
      ? processBoundary(frameType, (ASTNode) node.getChild(1))
      : null;
  return new WindowFrameSpec(start, end);
}
/**
 * Translates a single window frame boundary node into a {@link BoundarySpec}.
 * RANGE frames yield a RangeBoundarySpec, VALUES frames a ValueBoundarySpec,
 * and CURRENT ROW replaces either with a CurrentRowSpec (which carries no amount).
 *
 * @param frameType HiveParser.TOK_WINDOWRANGE or TOK_WINDOWVALUES
 * @param node the boundary AST node (KW_PRECEDING / KW_FOLLOWING / KW_CURRENT)
 * @throws SemanticException if the boundary amount is negative
 */
private BoundarySpec processBoundary(int frameType, ASTNode node) throws SemanticException {
  BoundarySpec bs = frameType == HiveParser.TOK_WINDOWRANGE ?
      new RangeBoundarySpec() : new ValueBoundarySpec();
  int type = node.getType();
  boolean hasAmt = true;
  switch(type)
  {
  case HiveParser.KW_PRECEDING:
    bs.setDirection(Direction.PRECEDING);
    break;
  case HiveParser.KW_FOLLOWING:
    bs.setDirection(Direction.FOLLOWING);
    break;
  case HiveParser.KW_CURRENT:
    // CURRENT ROW: replace the spec entirely; no amount child to read.
    bs = new CurrentRowSpec();
    hasAmt = false;
    break;
  }
  if ( hasAmt )
  {
    ASTNode amtNode = (ASTNode) node.getChild(0);
    if ( amtNode.getType() == HiveParser.KW_UNBOUNDED)
    {
      bs.setAmt(BoundarySpec.UNBOUNDED_AMOUNT);
    }
    else
    {
      int amt = Integer.parseInt(amtNode.getText());
      if ( amt < 0 ) {
        // Fixed message grammar: "amount provide is" -> "amount provided is".
        throw new SemanticException(
            "Window Frame Boundary Amount must be a +ve integer, amount provided is: " + amt);
      }
      bs.setAmt(amt);
    }
  }
  return bs;
}
/*
* check if a Select Expr is a constant.
* - current logic used is to look for HiveParser.TOK_TABLE_OR_COL
* - if there is none then the expression is a constant.
*/
/**
 * Tree visitor that decides whether a select expression is a constant.
 * Any TOK_TABLE_OR_COL node encountered flips {@code isConstant} to false;
 * once false it stays false until {@link #reset()} is called.
 */
private static class ConstantExprCheck implements ContextVisitor {
  // Sticky result flag: starts true, latched false on first column reference.
  boolean isConstant = true;
  @Override
  public void visit(Object t, Object parent, int childIndex, Map labels) {
    if ( !isConstant ) {
      return;
    }
    // Removed unused local `ASTNode node = (ASTNode) t;` — the type check
    // below goes through the adaptor, not the cast.
    if (ParseDriver.adaptor.getType(t) == HiveParser.TOK_TABLE_OR_COL ) {
      isConstant = false;
    }
  }
  public void reset() {
    isConstant = true;
  }
  protected boolean isConstant() {
    return isConstant;
  }
}
/**
 * Tree visitor that decides whether a select expression is one of the
 * destination's aggregation expressions. The flag is sticky: once a visited
 * node matches an aggregation expr it stays true until {@link #reset()}.
 */
private static class AggregationExprCheck implements ContextVisitor {
  HashMap<String, ASTNode> destAggrExprs;
  boolean isAggr = false;
  public AggregationExprCheck(HashMap<String, ASTNode> destAggrExprs) {
    super();
    this.destAggrExprs = destAggrExprs;
  }
  @Override
  public void visit(Object t, Object parent, int childIndex, Map labels) {
    // Latch on the first node that is a registered aggregation expression.
    if (!isAggr && destAggrExprs.containsValue(t)) {
      isAggr = true;
    }
  }
  public void reset() {
    isAggr = false;
  }
  protected boolean isAggr() {
    return isAggr;
  }
}
/*
* Returns false if there is a SelectExpr that is not a constant or an aggr.
*
*/
/**
 * Returns false if the given clause's select list contains an expression that
 * is neither a constant nor one of the clause's aggregation expressions.
 * Select-distinct queries are always considered valid (no aggregations moved).
 *
 * @param currQB the query block being analyzed
 * @param clause the destination clause name
 */
private boolean isValidGroupBySelectList(QB currQB, String clause){
ConstantExprCheck constantExprCheck = new ConstantExprCheck();
AggregationExprCheck aggrExprCheck = new AggregationExprCheck(
currQB.getParseInfo().getAggregationExprsForClause(clause));
TreeWizard tw = new TreeWizard(ParseDriver.adaptor, HiveParser.tokenNames);
ASTNode selectNode = currQB.getParseInfo().getSelForClause(clause);
/*
* for Select Distinct Queries we don't move any aggregations.
*/
if ( selectNode != null && selectNode.getType() == HiveParser.TOK_SELECTDI ) {
return true;
}
for (int i = 0; selectNode != null && i < selectNode.getChildCount(); i++) {
ASTNode selectExpr = (ASTNode) selectNode.getChild(i);
//check for TOK_HINTLIST expressions on ast
if(selectExpr.getType() != HiveParser.TOK_SELEXPR){
continue;
}
// Visitors are stateful and sticky; reset before each expression.
constantExprCheck.reset();
PTFTranslator.visit(selectExpr.getChild(0), constantExprCheck);
// Only non-constant expressions need the aggregation check.
if ( !constantExprCheck.isConstant() ) {
aggrExprCheck.reset();
PTFTranslator.visit(selectExpr.getChild(0), aggrExprCheck);
if (!aggrExprCheck.isAggr() ) {
return false;
}
}
}
return true;
}
//--------------------------- PTF handling: PTFInvocationSpec to PTFDesc --------------------------
/**
 * Translates a PTF invocation spec into a {@link PTFDesc} using the given
 * input row resolver.
 *
 * @param ptfQSpec the PTF invocation specification to translate
 * @param inputRR row resolver describing the PTF's input
 * @throws SemanticException if translation fails
 */
private PTFDesc translatePTFInvocationSpec(PTFInvocationSpec ptfQSpec, RowResolver inputRR)
    throws SemanticException{
  // Return the translation directly; the original null-initialized local
  // added nothing.
  PTFTranslator translator = new PTFTranslator();
  return translator.translate(ptfQSpec, this, conf, inputRR, unparseTranslator);
}
/**
 * Generates the operator plan for a PTF invocation. The invocation may be
 * componentized into several sub-invocations; each component's plan is built
 * on top of the previous component's output operator.
 *
 * @param ptfQSpec the PTF invocation specification
 * @param input the operator feeding the PTF chain
 * @return the top operator of the generated PTF plan
 * @throws SemanticException if plan generation fails
 */
Operator genPTFPlan(PTFInvocationSpec ptfQSpec, Operator input) throws SemanticException {
  for (PTFInvocationSpec componentSpec : PTFTranslator.componentize(ptfQSpec)) {
    input = genPTFPlanForComponentQuery(componentSpec, input);
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created PTF Plan ");
  }
  return input;
}
/**
* Construct the data structures containing ExprNodeDesc for partition
* columns and order columns. Use the input definition to construct the list
* of output columns for the ReduceSinkOperator
*
* @throws SemanticException
*/
// Populates, in place, the partition/order/value column lists, the column
// expression map, the output column names, the +/- order string, and the
// RowResolvers for the ReduceSink and Extract operators of a PTF plan.
// All collection parameters are out-parameters filled by this method.
void buildPTFReduceSinkDetails(PartitionedTableFunctionDef tabDef,
RowResolver inputRR,
ArrayList<ExprNodeDesc> partCols,
ArrayList<ExprNodeDesc> valueCols,
ArrayList<ExprNodeDesc> orderCols,
Map<String, ExprNodeDesc> colExprMap,
List<String> outputColumnNames,
StringBuilder orderString,
RowResolver rsOpRR,
RowResolver extractRR) throws SemanticException {
// Partition columns are implicitly ascending members of the sort key.
List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
for (PTFExpressionDef colDef : partColList) {
partCols.add(colDef.getExprNode());
orderCols.add(colDef.getExprNode());
orderString.append('+');
}
/*
* Order columns are used as key columns for constructing
* the ReduceSinkOperator
* Since we do not explicitly add these to outputColumnNames,
* we need to set includeKeyCols = false while creating the
* ReduceSinkDesc
*/
List<OrderExpressionDef> orderColList = tabDef.getOrder().getExpressions();
for (int i = 0; i < orderColList.size(); i++) {
OrderExpressionDef colDef = orderColList.get(i);
org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order order = colDef.getOrder();
if (order.name().equals("ASC")) {
orderString.append('+');
} else {
orderString.append('-');
}
orderCols.add(colDef.getExprNode());
}
ArrayList<ColumnInfo> colInfoList = inputRR.getColumnInfos();
/*
* construct the ReduceSinkRR
*/
int pos = 0;
for (ColumnInfo colInfo : colInfoList) {
// Every input column becomes a value column with a fresh internal name.
ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo.getType(), colInfo
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol());
valueCols.add(valueColExpr);
String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
outputColumnNames.add(internalName);
colExprMap.put(internalName, valueColExpr);
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
ColumnInfo newColInfo = new ColumnInfo(
internalName, colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
rsOpRR.put(alias[0], alias[1], newColInfo);
}
/*
* construct the ExtractRR
*/
// NOTE: entries re-encountered for the Having clause are deferred and
// added at the end, so the positional internal names stay consistent.
LinkedHashMap<String[], ColumnInfo> colsAddedByHaving =
new LinkedHashMap<String[], ColumnInfo>();
pos = 0;
for (ColumnInfo colInfo : colInfoList) {
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
/*
* if we have already encountered this colInfo internalName.
* We encounter it again because it must be put for the Having clause.
* We will add these entries in the end; in a loop on colsAddedByHaving. See below.
*/
if ( colsAddedByHaving.containsKey(alias)) {
continue;
}
ASTNode astNode = PTFTranslator.getASTNode(colInfo, inputRR);
ColumnInfo eColInfo = new ColumnInfo(
SemanticAnalyzer.getColumnInternalName(pos++), colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
if ( astNode == null ) {
extractRR.put(alias[0], alias[1], eColInfo);
}
else {
/*
* in case having clause refers to this column may have been added twice;
* once with the ASTNode.toStringTree as the alias
* and then with the real alias.
*/
extractRR.putExpression(astNode, eColInfo);
if ( !astNode.toStringTree().toLowerCase().equals(alias[1]) ) {
colsAddedByHaving.put(alias, eColInfo);
}
}
}
// Flush the deferred Having-clause entries into the Extract RR.
for(Map.Entry<String[], ColumnInfo> columnAddedByHaving : colsAddedByHaving.entrySet() ) {
String[] alias = columnAddedByHaving.getKey();
ColumnInfo eColInfo = columnAddedByHaving.getValue();
extractRR.put(alias[0], alias[1], eColInfo);
}
}
// Builds the operator sub-plan for one componentized PTF invocation:
// (optional map-side PTF) -> ReduceSink -> Extract -> PTF.
// Returns the final PTF operator of the component.
private Operator genPTFPlanForComponentQuery(PTFInvocationSpec ptfQSpec, Operator input)
throws SemanticException {
/*
* 1. Create the PTFDesc from the Qspec attached to this QB.
*/
RowResolver rr = opParseCtx.get(input).getRowResolver();
PTFDesc ptfDesc = translatePTFInvocationSpec(ptfQSpec, rr);
RowResolver rsOpRR = new RowResolver();
/*
* Build an RR for the Extract Op from the ReduceSink Op's RR.
* Why?
* We need to remove the Virtual Columns present in the RS's RR. The OI
* that gets passed to Extract at runtime doesn't contain the Virtual Columns.
* So internal names get changed. Consider testCase testJoinWithLeadLag,
* which is a self join on part and also has a Windowing expression.
* The RR of the RS op at translation time looks something like this:
* (_co1,_col2,..,_col7, _col8(vc=true),_col9(vc=true),
* _col10,_col11,.._col15(vc=true),_col16(vc=true),..)
* At runtime the Virtual columns are removed and all the columns after _col7
* are shifted 1 or 2 positions.
* So in child Operators ColumnExprNodeDesc's are no longer referring to the right columns.
*
* So we build a new RR for the Extract Op, with the Virtual Columns removed.
* We hand this to the PTFTranslator as the
* starting RR to use to translate a PTF Chain.
*/
RowResolver extractOpRR = new RowResolver();
/*
* 2. build Map-side Op Graph. Graph template is either:
* Input -> PTF_map -> ReduceSink
* or
* Input -> ReduceSink
*
* Here the ExprNodeDescriptors in the QueryDef are based on the Input Operator's RR.
*/
{
PartitionedTableFunctionDef tabDef = ptfDesc.getStartOfChain();
/*
* a. add Map-side PTF Operator if needed
*/
if (tabDef.isTransformsRawInput() )
{
RowResolver ptfMapRR = tabDef.getRawInputShape().getRr();
ptfDesc.setMapSide(true);
input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
new RowSchema(ptfMapRR.getColumnInfos()),
input), ptfMapRR);
// Refresh rr: the map-side PTF changed the input operator's shape.
rr = opParseCtx.get(input).getRowResolver();
}
/*
* b. Build Reduce Sink Details (keyCols, valueCols, outColNames etc.) for this ptfDesc.
*/
ArrayList<ExprNodeDesc> partCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> orderCols = new ArrayList<ExprNodeDesc>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
List<String> outputColumnNames = new ArrayList<String>();
StringBuilder orderString = new StringBuilder();
/*
* Use the input RR of TableScanOperator in case there is no map-side
* reshape of input.
* If the parent of ReduceSinkOperator is PTFOperator, use it's
* output RR.
*/
buildPTFReduceSinkDetails(tabDef,
rr,
partCols,
valueCols,
orderCols,
colExprMap,
outputColumnNames,
orderString,
rsOpRR,
extractOpRR);
input = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
.getReduceSinkDesc(orderCols,
valueCols, outputColumnNames, false,
-1, partCols, orderString.toString(), -1),
new RowSchema(rsOpRR.getColumnInfos()), input), rsOpRR);
input.setColumnExprMap(colExprMap);
}
/*
* 3. build Reduce-side Op Graph
*/
{
/*
* b. Construct Extract Operator.
*/
input = putOpInsertMap(OperatorFactory.getAndMakeChild(
new ExtractDesc(
new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
Utilities.ReduceField.VALUE
.toString(), "", false)),
new RowSchema(extractOpRR.getColumnInfos()),
input), extractOpRR);
/*
* c. Rebuilt the QueryDef.
* Why?
* - so that the ExprNodeDescriptors in the QueryDef are based on the
* Extract Operator's RowResolver
*/
rr = opParseCtx.get(input).getRowResolver();
ptfDesc = translatePTFInvocationSpec(ptfQSpec, rr);
/*
* d. Construct PTF Operator.
*/
RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
new RowSchema(ptfOpRR.getColumnInfos()),
input), ptfOpRR);
}
return input;
}
//--------------------------- Windowing handling: PTFInvocationSpec to PTFDesc --------------------
// Generates the operator plan for a windowing clause. The spec is first
// validated/normalized, then componentized into groups that share a
// partitioning; for each group a ReduceSink + PTF operator pair is stacked
// on top of the previous group's output. Order matters: wSpec, input and rr
// are all re-assigned on every iteration.
Operator genWindowingPlan(WindowingSpec wSpec, Operator input) throws SemanticException {
wSpec.validateAndMakeEffective();
WindowingComponentizer groups = new WindowingComponentizer(wSpec);
RowResolver rr = opParseCtx.get(input).getRowResolver();
while(groups.hasNext() ) {
wSpec = groups.next(conf, this, unparseTranslator, rr);
input = genReduceSinkPlanForWindowing(wSpec, rr, input);
// Refresh rr from the newly added ReduceSink/Extract pair before translating.
rr = opParseCtx.get(input).getRowResolver();
PTFTranslator translator = new PTFTranslator();
PTFDesc ptfDesc = translator.translate(wSpec, this, conf, rr, unparseTranslator);
RowResolver ptfOpRR = ptfDesc.getFuncDef().getOutputShape().getRr();
input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
new RowSchema(ptfOpRR.getColumnInfos()),
input), ptfOpRR);
rr = ptfOpRR;
}
return input;
}
// Builds the ReduceSink + Extract operator pair that shuffles and re-orders
// the input for one windowing group: partition columns become distribution
// keys, partition+order columns become sort keys (orderString holds '+'/'-'
// per key), and all input columns are forwarded as values.
private Operator genReduceSinkPlanForWindowing(WindowingSpec spec,
RowResolver inputRR,
Operator input) throws SemanticException{
ArrayList<ExprNodeDesc> partCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> valueCols = new ArrayList<ExprNodeDesc>();
ArrayList<ExprNodeDesc> orderCols = new ArrayList<ExprNodeDesc>();
Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
List<String> outputColumnNames = new ArrayList<String>();
StringBuilder orderString = new StringBuilder();
// Partition columns are implicitly ascending members of the sort key.
ArrayList<PartitionExpression> partColList = spec.getQueryPartitionSpec().getExpressions();
for (PartitionExpression partCol : partColList) {
ExprNodeDesc partExpr = genExprNodeDesc(partCol.getExpression(), inputRR);
partCols.add(partExpr);
orderCols.add(partExpr);
orderString.append('+');
}
// A missing order spec is treated as an empty list of order expressions.
ArrayList<OrderExpression> orderColList = spec.getQueryOrderSpec() == null ?
new ArrayList<PTFInvocationSpec.OrderExpression>() :
spec.getQueryOrderSpec().getExpressions();
for (int i = 0; i < orderColList.size(); i++) {
OrderExpression orderCol = orderColList.get(i);
org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order order = orderCol.getOrder();
if (order.name().equals("ASC")) {
orderString.append('+');
} else {
orderString.append('-');
}
ExprNodeDesc orderExpr = genExprNodeDesc(orderCol.getExpression(), inputRR);
orderCols.add(orderExpr);
}
ArrayList<ColumnInfo> colInfoList = inputRR.getColumnInfos();
// Build the ReduceSink RR: every input column forwarded as a value column
// under a fresh positional internal name.
RowResolver rsNewRR = new RowResolver();
int pos = 0;
for (ColumnInfo colInfo : colInfoList) {
ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo.getType(), colInfo
.getInternalName(), colInfo.getTabAlias(), colInfo
.getIsVirtualCol());
valueCols.add(valueColExpr);
String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
outputColumnNames.add(internalName);
colExprMap.put(internalName, valueColExpr);
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
ColumnInfo newColInfo = new ColumnInfo(
internalName, colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
rsNewRR.put(alias[0], alias[1], newColInfo);
// Also preserve any alternate (tab, col) mappings for this column.
String[] altMapping = inputRR.getAlternateMappings(colInfo.getInternalName());
if ( altMapping != null ) {
rsNewRR.put(altMapping[0], altMapping[1], newColInfo);
}
}
input = putOpInsertMap(OperatorFactory.getAndMakeChild(PlanUtils
.getReduceSinkDesc(orderCols,
valueCols, outputColumnNames, false,
-1, partCols, orderString.toString(), -1),
new RowSchema(rsNewRR.getColumnInfos()), input), rsNewRR);
input.setColumnExprMap(colExprMap);
// Construct the RR for extract operator
// Entries re-encountered for the Having clause are deferred and added at
// the end so positional internal names stay consistent.
RowResolver extractRR = new RowResolver();
LinkedHashMap<String[], ColumnInfo> colsAddedByHaving =
new LinkedHashMap<String[], ColumnInfo>();
pos = 0;
for (ColumnInfo colInfo : colInfoList) {
String[] alias = inputRR.reverseLookup(colInfo.getInternalName());
/*
* if we have already encountered this colInfo internalName.
* We encounter it again because it must be put for the Having clause.
* We will add these entries in the end; in a loop on colsAddedByHaving. See below.
*/
if ( colsAddedByHaving.containsKey(alias)) {
continue;
}
ASTNode astNode = PTFTranslator.getASTNode(colInfo, inputRR);
ColumnInfo eColInfo = new ColumnInfo(
SemanticAnalyzer.getColumnInternalName(pos++), colInfo.getType(), alias[0],
colInfo.getIsVirtualCol(), colInfo.isHiddenVirtualCol());
if ( astNode == null ) {
extractRR.put(alias[0], alias[1], eColInfo);
}
else {
/*
* in case having clause refers to this column may have been added twice;
* once with the ASTNode.toStringTree as the alias
* and then with the real alias.
*/
extractRR.putExpression(astNode, eColInfo);
if ( !astNode.toStringTree().toLowerCase().equals(alias[1]) ) {
colsAddedByHaving.put(alias, eColInfo);
}
}
String[] altMapping = inputRR.getAlternateMappings(colInfo.getInternalName());
if ( altMapping != null ) {
extractRR.put(altMapping[0], altMapping[1], eColInfo);
}
}
for(Map.Entry<String[], ColumnInfo> columnAddedByHaving : colsAddedByHaving.entrySet() ) {
String[] alias = columnAddedByHaving.getKey();
ColumnInfo eColInfo = columnAddedByHaving.getValue();
extractRR.put(alias[0], alias[1], eColInfo);
}
/*
* b. Construct Extract Operator.
*/
// NOTE(review): the RowSchema here is built from inputRR while the op's RR
// is extractRR — presumably intentional (runtime OI matches input), but
// worth confirming against genPTFPlanForComponentQuery which uses extractOpRR.
input = putOpInsertMap(OperatorFactory.getAndMakeChild(
new ExtractDesc(
new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
Utilities.ReduceField.VALUE
.toString(), "", false)),
new RowSchema(inputRR.getColumnInfos()),
input), extractRR);
return input;
}
/**
 * Parses a dynamically supplied select-expression string into a list of
 * {@link WindowExpressionSpec}s. Each expression must be a plain select
 * expression (no hints, no '*'); an explicit alias is used when present,
 * otherwise one is derived via {@code getColAlias}.
 *
 * @param selectExprStr the select list text to parse
 * @throws SemanticException on parse failure or an unsupported expression
 */
public static ArrayList<WindowExpressionSpec> parseSelect(String selectExprStr)
  throws SemanticException
{
  ASTNode selNode;
  try {
    selNode = new ParseDriver().parseSelect(selectExprStr, null);
  } catch (ParseException pe) {
    throw new SemanticException(pe);
  }
  ArrayList<WindowExpressionSpec> specs = new ArrayList<WindowExpressionSpec>();
  for (int pos = 0; pos < selNode.getChildCount(); pos++) {
    ASTNode selExpr = (ASTNode) selNode.getChild(pos);
    if (selExpr.getType() != HiveParser.TOK_SELEXPR) {
      throw new SemanticException(String.format(
          "Only Select expressions supported in dynamic select list: %s", selectExprStr));
    }
    ASTNode expr = (ASTNode) selExpr.getChild(0);
    if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
      throw new SemanticException(
          String.format("'%s' column not allowed in dynamic select list", selectExprStr));
    }
    // Prefer an explicit alias node; otherwise derive a column alias.
    boolean hasAliasNode = selExpr.getChildCount() > 1
        && selExpr.getChild(1).getType() == HiveParser.Identifier;
    String alias;
    if (hasAliasNode) {
      alias = ((ASTNode) selExpr.getChild(1)).getText();
    } else {
      alias = getColAlias(selExpr, null, null, true, -1)[1];
    }
    WindowExpressionSpec exprSpec = new WindowExpressionSpec();
    exprSpec.setAlias(alias);
    exprSpec.setExpression(expr);
    specs.add(exprSpec);
  }
  return specs;
}
// Adds (tab_alias, col_alias) entries to the GroupBy RowResolver for a
// group-by key expression, so references like "b.x" resolve after the
// GBy/ReduceSink operators have renamed columns.
private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
Operator<? extends OperatorDesc> reduceSinkOp, RowResolver gByRR) {
// Case 1: qualified reference tab.col — register under the written alias.
if ( gByExpr.getType() == HiveParser.DOT
&& gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL ) {
String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
.getChild(0).getChild(0).getText());
String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
gByExpr.getChild(1).getText());
gByRR.put(tab_alias, col_alias, colInfo);
// Case 2: unqualified column reference — try to recover its table alias
// from the RR of the operator below the GBy/ReduceSink chain.
} else if ( gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL ) {
String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
.getChild(0).getText());
String tab_alias = null;
/*
* If the input to the GBy has a tab alias for the column, then add an entry
* based on that tab_alias.
* For e.g. this query:
* select b.x, count(*) from t1 b group by x
* needs (tab_alias=b, col_alias=x) in the GBy RR.
* tab_alias=b comes from looking at the RowResolver that is the ancestor
* before any GBy/ReduceSinks added for the GBY operation.
*/
Operator<? extends OperatorDesc> parent = reduceSinkOp;
while ( parent instanceof ReduceSinkOperator ||
parent instanceof GroupByOperator ) {
parent = parent.getParentOperators().get(0);
}
RowResolver parentRR = opParseCtx.get(parent).getRowResolver();
try {
// NOTE(review): tab_alias is still null here, so this looks up the column
// without a table qualifier — presumably relying on RowResolver's
// null-alias lookup; confirm against RowResolver.get semantics.
ColumnInfo pColInfo = parentRR.get(tab_alias, col_alias);
tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
} catch(SemanticException se) {
// Ambiguous/unresolvable column: fall through with tab_alias == null.
}
gByRR.put(tab_alias, col_alias, colInfo);
}
}
/**
 * Determines the write type for a table load. Non-native tables have unknown
 * characteristics and no rational way to guess, so the most conservative
 * answer (INSERT_OVERWRITE) is assumed; otherwise the load's replace flag
 * decides between overwrite and plain insert.
 *
 * @param ltd the load descriptor (its replace flag is consulted for native tables)
 * @param isNonNativeTable whether the target is a non-native table
 */
private WriteEntity.WriteType determineWriteType(LoadTableDesc ltd, boolean isNonNativeTable) {
  if (isNonNativeTable || ltd.getReplace()) {
    return WriteEntity.WriteType.INSERT_OVERWRITE;
  }
  return WriteEntity.WriteType.INSERT;
}
/**** Temporary Place Holder For Optiq plan Gen, Optimizer ****/
/*
* Entry point to Optimizations using Optiq.
*/
// TODO: Extend QP to indicate LV, Multi Insert, Cubes, Rollups...
/**
 * Decides whether the Optiq (CBO) planner can handle the current query.
 * The query must be under the configured join limit and must not use any of
 * the constructs the CBO path does not yet support (outer joins, cluster/
 * distribute/sort by, windowing, PTFs, scripts, multi-insert, or filters
 * with subqueries).
 *
 * @return true if the Optiq planner should be run for this query
 */
private boolean canHandleQuery() {
  // Return the condition directly instead of routing it through a flag.
  return (queryProperties.getJoinCount() < HiveConf.getIntVar(conf,
      HiveConf.ConfVars.HIVE_CBO_MAX_JOINS_SUPPORTED))
      && (queryProperties.getOuterJoinCount() == 0)
      && !queryProperties.hasClusterBy()
      && !queryProperties.hasDistributeBy()
      && !queryProperties.hasSortBy()
      && !queryProperties.hasWindowing()
      && !queryProperties.hasPTF()
      && !queryProperties.usesScript()
      && !queryProperties.hasMultiDestQuery()
      && !queryProperties.hasFilterWithSubQuery();
}
private class OptiqBasedPlanner implements Frameworks.PlannerAction<RelNode> {
RelOptCluster m_cluster;
RelOptSchema m_relOptSchema;
SchemaPlus m_rootSchema;
SemanticException m_semanticException;
// TODO: Do we need to keep track of RR, ColNameToPosMap for every op or
// just last one.
LinkedHashMap<RelNode, RowResolver> m_relToHiveRR = new LinkedHashMap<RelNode, RowResolver>();
LinkedHashMap<RelNode, ImmutableMap<String, Integer>> m_relToHiveColNameOptiqPosMap = new LinkedHashMap<RelNode, ImmutableMap<String, Integer>>();
/**
 * Runs the Optiq planner (via {@link Frameworks#withPlanner}) and converts
 * the optimized relational plan back into a Hive AST. A SemanticException
 * captured during planning is rethrown as-is; any other failure is wrapped
 * in a RuntimeException.
 *
 * @return the AST produced from the optimized Optiq plan
 * @throws SemanticException if planning failed with a semantic error
 */
private ASTNode getOptimizedAST() throws SemanticException {
  RelNode optimizedOptiqPlan;
  try {
    optimizedOptiqPlan = Frameworks.withPlanner(this);
  } catch (Exception e) {
    // apply() stashes semantic errors in m_semanticException before
    // rethrowing; surface those as SemanticException.
    if (m_semanticException == null) {
      throw new RuntimeException(e);
    }
    throw m_semanticException;
  }
  return ASTConverter.convert(optimizedOptiqPlan, resultSchema);
}
// Frameworks.PlannerAction callback: builds the logical plan for the current
// QB, registers metadata providers and rewrite rules on a Volcano planner,
// and returns the best plan found. SemanticExceptions from plan generation
// are stashed in m_semanticException and rethrown wrapped (see getOptimizedAST).
@Override
public RelNode apply(RelOptCluster cluster, RelOptSchema relOptSchema,
SchemaPlus rootSchema) {
RelOptPlanner planner = HiveVolcanoPlanner.createPlanner();
/*
* recreate cluster, so that it picks up the additional traitDef
*/
final RelOptQuery query = new RelOptQuery(planner);
final RexBuilder rexBuilder = cluster.getRexBuilder();
cluster = query.createCluster(rexBuilder.getTypeFactory(), rexBuilder);
// Stash the planning context for use by the genXxxLogicalPlan helpers.
m_cluster = cluster;
m_relOptSchema = relOptSchema;
m_rootSchema = rootSchema;
RelNode optiqPlan = null;
try {
optiqPlan = genLogicalPlan(qb);
} catch (SemanticException e) {
// Remember the semantic error so the caller can rethrow it unwrapped.
m_semanticException = e;
throw new RuntimeException(e);
}
List<RelMetadataProvider> list = Lists.newArrayList();
list.add(HiveDefaultRelMetadataProvider.INSTANCE);
planner.registerMetadataProviders(list);
RelMetadataProvider chainedProvider = ChainedRelMetadataProvider.of(list);
cluster.setMetadataProvider(new CachingRelMetadataProvider(
chainedProvider, planner));
// Join reordering rules.
planner.addRule(HiveSwapJoinRule.INSTANCE);
planner.addRule(HivePushJoinThroughJoinRule.LEFT);
planner.addRule(HivePushJoinThroughJoinRule.RIGHT);
// Optionally pull projects above joins (guarded by config).
if (HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_CBO_PULLPROJECTABOVEJOIN_RULE)) {
planner.addRule(HivePullUpProjectsAboveJoinRule.BOTH_PROJECT);
planner.addRule(HivePullUpProjectsAboveJoinRule.LEFT_PROJECT);
planner.addRule(HivePullUpProjectsAboveJoinRule.RIGHT_PROJECT);
planner.addRule(HiveMergeProjectRule.INSTANCE);
}
// Convert the plan to the Hive convention if it is not already there.
RelTraitSet desiredTraits = cluster.traitSetOf(HiveRel.CONVENTION,
RelCollationImpl.EMPTY);
RelNode rootRel = optiqPlan;
if (!optiqPlan.getTraitSet().equals(desiredTraits)) {
rootRel = planner.changeTraits(optiqPlan, desiredTraits);
}
planner.setRoot(rootRel);
return planner.findBestExp();
}
// Placeholder for UNION plan generation on the CBO path: not implemented yet,
// always returns null. Callers must handle the null result (canHandleQuery
// is expected to keep unsupported queries off this path).
private RelNode genUnionLogicalPlan(String unionalias, String leftalias,
RelNode leftOp, String rightalias, RelNode rightOp) {
return null;
}
// Builds a HiveJoinRel from two child RelNodes, a Hive join type and an
// optional join-condition AST. Also registers the join's combined
// RowResolver and Hive->Optiq column-position map in the instance maps.
private RelNode genJoinRelNode(RelNode leftRel, RelNode rightRel,
JoinType hiveJoinType, ASTNode joinCond) throws SemanticException {
RelNode joinRel = null;
// 1. construct the RowResolver for the new Join Node by combining row
// resolvers from left, right
RowResolver leftRR = this.m_relToHiveRR.get(leftRel);
RowResolver rightRR = this.m_relToHiveRR.get(rightRel);
RowResolver joinRR = null;
// LEFTSEMI joins only project the left side's columns.
if (hiveJoinType != JoinType.LEFTSEMI) {
joinRR = RowResolver.getCombinedRR(leftRR, rightRR);
} else {
joinRR = new RowResolver();
RowResolver.add(joinRR, leftRR, 0);
}
// 2. Construct ExpressionNodeDesc representing Join Condition
RexNode optiqJoinCond = null;
if (joinCond != null) {
Map<ASTNode, ExprNodeDesc> exprNodes = JoinCondnTypeCheckProcFactory
.genExprNode(joinCond, new JoinTypeCheckCtx(leftRR, rightRR));
ExprNodeDesc joinCondnExprNode = exprNodes.get(joinCond);
List<RelNode> inputRels = new ArrayList<RelNode>();
inputRels.add(leftRel);
inputRels.add(rightRel);
optiqJoinCond = RexNodeConverter.convert(m_cluster, joinCondnExprNode,
inputRels, m_relToHiveRR, m_relToHiveColNameOptiqPosMap, false);
} else {
// No explicit condition (e.g. cross join): use a TRUE predicate.
optiqJoinCond = RexNodeConverter.getAlwaysTruePredicate(m_cluster);
}
// 3. Validate that join condition is legal (i.e no function referring to
// both sides of join, only equi join)
// TODO: Join filter handling (only supported for OJ by runtime or is it
// supported for IJ as well)
// 4. Construct Join Rel Node
boolean leftSemiJoin = false;
JoinRelType optiqJoinType;
switch (hiveJoinType) {
case LEFTOUTER:
optiqJoinType = JoinRelType.LEFT;
break;
case RIGHTOUTER:
optiqJoinType = JoinRelType.RIGHT;
break;
case FULLOUTER:
optiqJoinType = JoinRelType.FULL;
break;
case LEFTSEMI:
// Optiq has no semi-join type here; model as INNER with a flag.
optiqJoinType = JoinRelType.INNER;
leftSemiJoin = true;
break;
case INNER:
default:
optiqJoinType = JoinRelType.INNER;
break;
}
joinRel = HiveJoinRel.getJoin(m_cluster, leftRel, rightRel,
optiqJoinCond, optiqJoinType, leftSemiJoin);
// 5. Add new JoinRel & its RR to the maps
m_relToHiveColNameOptiqPosMap.put(joinRel,
this.buildHiveToOptiqColumnMap(joinRR, joinRel));
m_relToHiveRR.put(joinRel, joinRR);
return joinRel;
}
/**
 * Generate Join Logical Plan Relnode by walking through the join AST.
 * Determines the Hive join type from the AST token, resolves the left and
 * right inputs (table refs / subqueries / PTFs via aliasToRel, or nested
 * joins recursively), then delegates to genJoinRelNode.
 *
 * @param joinParseTree the join AST node
 * @param aliasToRel
 * Alias(Table/Relation alias) to RelNode; only read and not
 * written in to by this method
 * @return the RelNode for the join
 * @throws SemanticException on translation failure
 */
private RelNode genJoinLogicalPlan(ASTNode joinParseTree,
Map<String, RelNode> aliasToRel) throws SemanticException {
RelNode leftRel = null;
RelNode rightRel = null;
JoinType hiveJoinType = null;
if (joinParseTree.getToken().getType() == HiveParser.TOK_UNIQUEJOIN) {
throw new RuntimeException("CBO does not support Unique Join");
}
// 1. Determine Join Type
// TODO: What about TOK_CROSSJOIN, TOK_MAPJOIN
switch (joinParseTree.getToken().getType()) {
case HiveParser.TOK_LEFTOUTERJOIN:
hiveJoinType = JoinType.LEFTOUTER;
break;
case HiveParser.TOK_RIGHTOUTERJOIN:
hiveJoinType = JoinType.RIGHTOUTER;
break;
case HiveParser.TOK_FULLOUTERJOIN:
hiveJoinType = JoinType.FULLOUTER;
break;
case HiveParser.TOK_LEFTSEMIJOIN:
hiveJoinType = JoinType.LEFTSEMI;
break;
default:
hiveJoinType = JoinType.INNER;
break;
}
// 2. Get Left Table Alias
ASTNode left = (ASTNode) joinParseTree.getChild(0);
if ((left.getToken().getType() == HiveParser.TOK_TABREF)
|| (left.getToken().getType() == HiveParser.TOK_SUBQUERY)
|| (left.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
String tableName = getUnescapedUnqualifiedTableName(
(ASTNode) left.getChild(0)).toLowerCase();
String leftTableAlias = left.getChildCount() == 1 ? tableName
: unescapeIdentifier(left.getChild(left.getChildCount() - 1)
.getText().toLowerCase());
// ptf node form is: ^(TOK_PTBLFUNCTION $name $alias?
// partitionTableFunctionSource partitioningSpec? expression*)
// guaranteed to have an alias here: check done in processJoin
leftTableAlias = (left.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) ? unescapeIdentifier(left
.getChild(1).getText().toLowerCase())
: leftTableAlias;
leftRel = aliasToRel.get(leftTableAlias);
} else if (isJoinToken(left)) {
// Nested join on the left side: recurse.
leftRel = genJoinLogicalPlan(left, aliasToRel);
} else {
// NOTE(review): with assertions disabled, leftRel stays null here and
// flows into genJoinRelNode — relies on the parser never producing
// another node type in this position.
assert (false);
}
// 3. Get Right Table Alias
ASTNode right = (ASTNode) joinParseTree.getChild(1);
if ((right.getToken().getType() == HiveParser.TOK_TABREF)
|| (right.getToken().getType() == HiveParser.TOK_SUBQUERY)
|| (right.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) {
String tableName = getUnescapedUnqualifiedTableName(
(ASTNode) right.getChild(0)).toLowerCase();
String rightTableAlias = right.getChildCount() == 1 ? tableName
: unescapeIdentifier(right.getChild(right.getChildCount() - 1)
.getText().toLowerCase());
// ptf node form is: ^(TOK_PTBLFUNCTION $name $alias?
// partitionTableFunctionSource partitioningSpec? expression*)
// guaranteed to have an alias here: check done in processJoin
rightTableAlias = (right.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) ? unescapeIdentifier(right
.getChild(1).getText().toLowerCase())
: rightTableAlias;
rightRel = aliasToRel.get(rightTableAlias);
} else {
// NOTE(review): nested joins are not handled on the right side (no
// isJoinToken branch as on the left) — presumably joins are
// left-deep at this point; confirm upstream normalization.
assert (false);
}
// 4. Get Join Condn
ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);
// 5. Create Join rel
return genJoinRelNode(leftRel, rightRel, hiveJoinType, joinCond);
}
/**
 * Builds a HiveTableScanRel for the given table alias, registering its
 * row resolver and column-position map for downstream operators.
 *
 * The logical schema is: non-partition columns + partition columns +
 * virtual columns.
 *
 * @param tableAlias alias of the source table within the query block
 * @param qb         query block providing metadata for the alias
 * @return the table scan rel, or throws RuntimeException on any failure
 */
private RelNode genTableLogicalPlan(String tableAlias, QB qb) {
  RowResolver rr = new RowResolver();
  HiveTableScanRel tableRel = null;
  try {
    // 1. Get Table Alias
    // NOTE(review): alias_id is currently unused; getAliasId is presumably
    // side-effect free — confirm before removing.
    String alias_id = getAliasId(tableAlias, qb);
    // 2. Get Table Metadata
    Table tab = qb.getMetaData().getSrcForAlias(tableAlias);
    // 3.1 Add Column info for non-partition cols (Object Inspector fields)
    StructObjectInspector rowObjectInspector = (StructObjectInspector) tab
        .getDeserializer().getObjectInspector();
    List<? extends StructField> fields = rowObjectInspector
        .getAllStructFieldRefs();
    ColumnInfo colInfo;
    String colName;
    ArrayList<ColumnInfo> cInfoLst = new ArrayList<ColumnInfo>();
    for (int i = 0; i < fields.size(); i++) {
      colName = fields.get(i).getFieldName();
      colInfo = new ColumnInfo(fields.get(i).getFieldName(),
          TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
              .getFieldObjectInspector()), tableAlias, false);
      // simplified from the redundant "cond ? true : false" form
      colInfo.setSkewedCol(isSkewedCol(tableAlias, qb, colName));
      rr.put(tableAlias, colName, colInfo);
      cInfoLst.add(colInfo);
    }
    // TODO: Fix this
    // Stats are requested only for non-partition columns.
    ArrayList<ColumnInfo> columnsThatNeedsStats = new ArrayList<ColumnInfo>(
        cInfoLst);
    // 3.2 Add column info corresponding to partition columns
    for (FieldSchema part_col : tab.getPartCols()) {
      colName = part_col.getName();
      colInfo = new ColumnInfo(colName,
          TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()),
          tableAlias, true);
      rr.put(tableAlias, colName, colInfo);
      cInfoLst.add(colInfo);
    }
    // 3.3 Add column info corresponding to virtual columns
    Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf)
        .iterator();
    while (vcs.hasNext()) {
      VirtualColumn vc = vcs.next();
      colInfo = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias,
          true, vc.getIsHidden());
      rr.put(tableAlias, vc.getName(), colInfo);
      cInfoLst.add(colInfo);
    }
    // 3.4 Build row type from field <type, name>
    RelDataType rowType = TypeConverter.getType(m_cluster, rr, null);
    // 4. Build RelOptAbstractTable
    RelOptHiveTable optTable = new RelOptHiveTable(m_relOptSchema,
        tableAlias, rowType, tab, columnsThatNeedsStats);
    // 5. Build Hive Table Scan Rel
    tableRel = new HiveTableScanRel(m_cluster,
        m_cluster.traitSetOf(HiveRel.CONVENTION), optTable, rowType);
    // 6. Add Schema(RR) to RelNode-Schema map
    ImmutableMap<String, Integer> hiveToOptiqColMap = buildHiveToOptiqColumnMap(
        rr, tableRel);
    m_relToHiveRR.put(tableRel, rr);
    m_relToHiveColNameOptiqPosMap.put(tableRel, hiveToOptiqColMap);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
  return tableRel;
}
/**
 * Wraps srcRel in a HiveFilterRel for the given AST filter expression.
 *
 * The filter does not change the schema, so the input's row resolver and
 * column-position map are propagated to the new rel.
 *
 * @param filterExpr AST of the filter condition (WHERE or HAVING body)
 * @param srcRel     input relational expression
 * @throws SemanticException if the expression cannot be analyzed
 */
private RelNode genFilterRelNode(ASTNode filterExpr, RelNode srcRel)
    throws SemanticException {
  ExprNodeDesc filterCondn = genExprNodeDesc(filterExpr,
      m_relToHiveRR.get(srcRel));
  ImmutableMap<String, Integer> hiveColNameOptiqPosMap = this.m_relToHiveColNameOptiqPosMap
      .get(srcRel);
  RexNode convertedFilterExpr = new RexNodeConverter(m_cluster,
      srcRel.getRowType(), hiveColNameOptiqPosMap, 0, true)
      .convert(filterCondn);
  RelNode filterRel = new HiveFilterRel(m_cluster,
      m_cluster.traitSetOf(HiveRel.CONVENTION), srcRel, convertedFilterExpr);
  // Fixed: the position map was previously registered twice for filterRel.
  m_relToHiveColNameOptiqPosMap.put(filterRel, hiveColNameOptiqPosMap);
  m_relToHiveRR.put(filterRel, m_relToHiveRR.get(srcRel));
  return filterRel;
}
/**
 * Builds the filter rel for the query block's WHERE clause.
 *
 * A query block reaching CBO has at most one destination (multi-insert is
 * rejected by getQBParseInfo), hence at most one WHERE expression.
 *
 * @return the filter rel, or null when the block has no WHERE clause
 */
private RelNode genFilterLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  Iterator<ASTNode> whereIt = getQBParseInfo(qb).getDestToWhereExpr()
      .values().iterator();
  if (!whereIt.hasNext()) {
    return null;
  }
  ASTNode whereCond = (ASTNode) whereIt.next().getChild(0);
  return genFilterRelNode(whereCond, srcRel);
}
// Lookup table from Hive UDAF names to the Optiq aggregation functions
// supported by the CBO path; anything absent here fails in convertAgg.
private final Map<String, Aggregation> AGG_MAP = ImmutableMap
    .<String, Aggregation> builder()
    .put("count", (Aggregation) SqlStdOperatorTable.COUNT)
    .put("sum", SqlStdOperatorTable.SUM)
    .put("min", SqlStdOperatorTable.MIN)
    .put("max", SqlStdOperatorTable.MAX)
    .put("avg", SqlStdOperatorTable.AVG)
    .put("stddev_samp", SqlFunctionConverter.hiveAggFunction("stddev_samp"))
    .build();
/**
 * Class to store GenericUDAF related information.
 */
private class AggInfo {
  // Argument expressions passed to the UDAF.
  private final List<ExprNodeDesc> m_aggParams;
  // Hive type of the aggregation result.
  private final TypeInfo m_returnType;
  // GenericUDAF name (e.g. "count", "sum"); keys into AGG_MAP.
  private final String m_udfName;
  // True for DISTINCT aggregations (TOK_FUNCTIONDI).
  private final boolean m_distinct;

  private AggInfo(List<ExprNodeDesc> aggParams, TypeInfo returnType,
      String udfName, boolean isDistinct) {
    m_aggParams = aggParams;
    m_returnType = returnType;
    m_udfName = udfName;
    m_distinct = isDistinct;
  }
}
/**
 * Converts one Hive aggregation (AggInfo) into an Optiq AggregateCall,
 * appending any aggregate arguments not already projected by the child
 * to gbChildProjLst (and recording their positions in rexNodeToPosMap).
 */
private AggregateCall convertAgg(AggInfo agg, RelNode input,
    List<RexNode> gbChildProjLst, RexNodeConverter converter,
    HashMap<RexNode, Integer> rexNodeToPosMap, Integer childProjLstIndx)
    throws SemanticException {
  // Only UDAFs registered in AGG_MAP are supported on the CBO path.
  final Aggregation aggregation = AGG_MAP.get(agg.m_udfName);
  if (aggregation == null) {
    throw new AssertionError("agg not found: " + agg.m_udfName);
  }

  List<Integer> argList = new ArrayList<Integer>();
  RelDataType type = TypeConverter.convert(agg.m_returnType,
      this.m_cluster.getTypeFactory());
  if (aggregation.equals(SqlStdOperatorTable.AVG)) {
    // For AVG use the type of its "sum" component field.
    type = type.getField("sum", false).getType();
  }

  // TODO: Does HQL allow expressions as aggregate args or can it only be
  // projections from child?
  // Reuse an existing child projection when the same RexNode was already
  // emitted; otherwise append it and advance the projection index.
  Integer inputIndx;
  RexNode rexNd = null;
  for (ExprNodeDesc expr : agg.m_aggParams) {
    rexNd = converter.convert(expr);
    inputIndx = rexNodeToPosMap.get(rexNd);
    if (inputIndx == null) {
      gbChildProjLst.add(rexNd);
      rexNodeToPosMap.put(rexNd, childProjLstIndx);
      inputIndx = childProjLstIndx;
      childProjLstIndx++;
    }
    argList.add(inputIndx);
  }

  /*
   * set the type to the first arg, if there is one; because the RTi set on
   * Aggregation call assumes this is the output type.
   */
  // NOTE(review): this deliberately overrides the AVG special-case above
  // whenever the aggregate has arguments — confirm intended.
  if (argList.size() > 0) {
    RexNode rex = converter.convert(agg.m_aggParams.get(0));
    type = rex.getType();
  }
  return new AggregateCall(aggregation, agg.m_distinct, argList, type, null);
}
/**
 * Builds a HiveAggregateRel over a synthetic child project containing the
 * group-by keys followed by any aggregate arguments.
 *
 * @param gbExprs    group-by key expressions
 * @param aggInfoLst aggregation descriptors (see AggInfo)
 * @param srcRel     input relational expression
 * @throws SemanticException if Optiq rejects the aggregate (InvalidRelException)
 */
private RelNode genGBRelNode(List<ExprNodeDesc> gbExprs,
    List<AggInfo> aggInfoLst, RelNode srcRel) throws SemanticException {
  RowResolver gbInputRR = this.m_relToHiveRR.get(srcRel);
  ImmutableMap<String, Integer> posMap = this.m_relToHiveColNameOptiqPosMap
      .get(srcRel);
  RexNodeConverter converter = new RexNodeConverter(this.m_cluster,
      srcRel.getRowType(), posMap, 0, false);

  // Child project list: GB keys first; convertAgg appends aggregate
  // arguments that are not already present.
  final List<RexNode> gbChildProjLst = Lists.newArrayList();
  final HashMap<RexNode, Integer> rexNodeToPosMap = new HashMap<RexNode, Integer>();
  final BitSet groupSet = new BitSet();
  Integer gbIndx = 0;
  RexNode rnd;
  for (ExprNodeDesc key : gbExprs) {
    rnd = converter.convert(key);
    gbChildProjLst.add(rnd);
    groupSet.set(gbIndx);
    rexNodeToPosMap.put(rnd, gbIndx);
    gbIndx++;
  }

  // (Removed unused locals "signature" and loop-external "i".)
  List<AggregateCall> aggregateCalls = Lists.newArrayList();
  for (AggInfo agg : aggInfoLst) {
    aggregateCalls.add(convertAgg(agg, srcRel, gbChildProjLst, converter,
        rexNodeToPosMap, gbChildProjLst.size()));
  }

  RelNode gbInputRel = HiveProjectRel.create(srcRel, gbChildProjLst, null);

  HiveRel aggregateRel = null;
  try {
    aggregateRel = new HiveAggregateRel(m_cluster,
        m_cluster.traitSetOf(HiveRel.CONVENTION), gbInputRel, groupSet,
        aggregateCalls);
  } catch (InvalidRelException e) {
    throw new SemanticException(e);
  }
  return aggregateRel;
}
/**
 * Adds (tab_alias, col_alias) entries to the group-by output row resolver
 * so that both qualified (t.c) and unqualified (c) references to a GB key
 * resolve after the aggregation.
 */
private void addAlternateGByKeyMappings(ASTNode gByExpr,
    ColumnInfo colInfo, RowResolver gByInputRR, RowResolver gByRR) {
  if (gByExpr.getType() == HiveParser.DOT
      && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) {
    // Qualified reference "tab.col": register under both parts.
    String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
        .getChild(0).getChild(0).getText());
    String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
        .getChild(1).getText());
    gByRR.put(tab_alias, col_alias, colInfo);
  } else if (gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL) {
    // Unqualified reference "col": try to recover the table alias from the
    // input RR (null tab_alias is a valid lookup key here).
    String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr
        .getChild(0).getText());
    String tab_alias = null;
    /*
     * If the input to the GBy has a tab alias for the column, then add an
     * entry based on that tab_alias. For e.g. this query: select b.x,
     * count(*) from t1 b group by x needs (tab_alias=b, col_alias=x) in the
     * GBy RR. tab_alias=b comes from looking at the RowResolver that is the
     * ancestor before any GBy/ReduceSinks added for the GBY operation.
     */
    try {
      ColumnInfo pColInfo = gByInputRR.get(tab_alias, col_alias);
      tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
    } catch (SemanticException se) {
      // Deliberately ignored: an unresolvable column simply keeps
      // tab_alias == null (best-effort alternate mapping).
    }
    gByRR.put(tab_alias, col_alias, colInfo);
  }
}
/**
 * Generate GB plan.
 *
 * Builds the group-by + aggregation rel for the query block: converts the
 * GB key ASTs and aggregation trees into ExprNodeDescs/AggInfos, creates
 * the aggregate rel via genGBRelNode, and registers the output row
 * resolver and column-position map.
 *
 * @param qb query block (single destination; multi-insert unsupported)
 * @param srcRel input rel
 * @return the aggregate rel, or null when the block has no GB/aggregations
 *         TODO: 1. Grouping Sets (roll up..)
 * @throws SemanticException
 */
private RelNode genGBLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  RelNode gbRel = null;
  QBParseInfo qbp = getQBParseInfo(qb);
  // 1. Gather GB Expressions (AST)
  // NOTE: Multi Insert is not supported
  String detsClauseName = qbp.getClauseNames().iterator().next();
  List<ASTNode> grpByAstExprs = getGroupByForClause(qbp, detsClauseName);
  if (grpByAstExprs != null && !grpByAstExprs.isEmpty()) {
    // 2. Input, Output Row Resolvers
    RowResolver groupByInputRowResolver = this.m_relToHiveRR.get(srcRel);
    RowResolver groupByOutputRowResolver = new RowResolver();
    groupByOutputRowResolver.setIsExprResolver(true);
    // 3. Construct GB Keys (ExprNode)
    ArrayList<ExprNodeDesc> gbExprNDescLst = new ArrayList<ExprNodeDesc>();
    ArrayList<String> outputColumnNames = new ArrayList<String>();
    for (int i = 0; i < grpByAstExprs.size(); ++i) {
      ASTNode grpbyExpr = grpByAstExprs.get(i);
      Map<ASTNode, ExprNodeDesc> astToExprNDescMap = TypeCheckProcFactory
          .genExprNode(grpbyExpr, new TypeCheckCtx(groupByInputRowResolver));
      ExprNodeDesc grpbyExprNDesc = astToExprNDescMap.get(grpbyExpr);
      if (grpbyExprNDesc == null)
        throw new RuntimeException("Invalid Column Reference: "
            + grpbyExpr.dump());
      gbExprNDescLst.add(grpbyExprNDesc);
      // TODO: Should we use grpbyExprNDesc.getTypeInfo()? what if expr is
      // UDF
      String field = getColumnInternalName(i);
      outputColumnNames.add(field);
      ColumnInfo oColInfo = new ColumnInfo(field,
          grpbyExprNDesc.getTypeInfo(), null, false);
      groupByOutputRowResolver.putExpression(grpbyExpr, oColInfo);
      // TODO: Alternate mappings, are they necessary?
      addAlternateGByKeyMappings(grpbyExpr, oColInfo,
          groupByInputRowResolver, groupByOutputRowResolver);
    }
    // 4. Construct aggregation function Info
    ArrayList<AggInfo> aggregations = new ArrayList<AggInfo>();
    HashMap<String, ASTNode> aggregationTrees = qbp
        .getAggregationExprsForClause(detsClauseName);
    assert (aggregationTrees != null);
    int numDistinctUDFs = 0;
    for (ASTNode value : aggregationTrees.values()) {
      // 4.1 Convert UDF Params to ExprNodeDesc
      ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>();
      // Child 0 is the function name, so parameters start at index 1.
      for (int i = 1; i < value.getChildCount(); i++) {
        ASTNode paraExpr = (ASTNode) value.getChild(i);
        ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr,
            groupByInputRowResolver);
        aggParameters.add(paraExprNode);
      }
      // 4.2 Determine type of UDF
      // This is the GenericUDAF name
      String aggName = unescapeIdentifier(value.getChild(0).getText());
      boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI;
      boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR;
      if (isDistinct) {
        numDistinctUDFs++;
      }
      Mode amode = groupByDescModeToUDAFMode(GroupByDesc.Mode.COMPLETE,
          isDistinct);
      GenericUDAFEvaluator genericUDAFEvaluator = getGenericUDAFEvaluator(
          aggName, aggParameters, value, isDistinct, isAllColumns);
      assert (genericUDAFEvaluator != null);
      GenericUDAFInfo udaf = getGenericUDAFInfo(genericUDAFEvaluator,
          amode, aggParameters);
      AggInfo aInfo = new AggInfo(aggParameters, udaf.returnType, aggName,
          isDistinct);
      aggregations.add(aInfo);
      // Output columns: GB keys occupy the first positions, aggregations
      // follow, hence the offset below.
      String field = getColumnInternalName(gbExprNDescLst.size()
          + aggregations.size() - 1);
      outputColumnNames.add(field);
      groupByOutputRowResolver.putExpression(value, new ColumnInfo(field,
          aInfo.m_returnType, "", false));
    }
    gbRel = genGBRelNode(gbExprNDescLst, aggregations, srcRel);
    m_relToHiveColNameOptiqPosMap.put(gbRel,
        buildHiveToOptiqColumnMap(groupByOutputRowResolver, gbRel));
    this.m_relToHiveRR.put(gbRel, groupByOutputRowResolver);
  }
  return gbRel;
}
/**
 * Builds the ORDER BY plan: a HiveSortRel, with a child project when the
 * OB expressions are not plain column references (Optiq requires compound
 * sort keys to be projected by the child) and a parent project to hide
 * those synthetic virtual columns from the result.
 *
 * @return the sort (or wrapping project) rel, or null when no ORDER BY
 */
private RelNode genOBLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  RelNode relToRet = null;
  QBParseInfo qbp = getQBParseInfo(qb);
  String dest = qbp.getClauseNames().iterator().next();
  ASTNode obAST = qbp.getOrderByForClause(dest);
  if (obAST != null) {
    // 1. OB Expr sanity test
    // in strict mode, in the presence of order by, limit must be specified
    Integer limit = qb.getParseInfo().getDestLimit(dest);
    if (conf.getVar(HiveConf.ConfVars.HIVEMAPREDMODE).equalsIgnoreCase(
        "strict")
        && limit == null) {
      throw new SemanticException(generateErrorMessage(obAST,
          ErrorMsg.NO_LIMIT_WITH_ORDERBY.getMsg()));
    }
    // 2. Walk through OB exprs and extract field collations and additional
    // virtual columns needed
    final List<RexNode> newVCLst = new ArrayList<RexNode>();
    final List<RelFieldCollation> fieldCollations = Lists.newArrayList();
    int fieldIndex = 0;
    List<Node> obASTExprLst = obAST.getChildren();
    ASTNode obASTExpr;
    List<Pair<ASTNode, TypeInfo>> vcASTTypePairs = new ArrayList<Pair<ASTNode, TypeInfo>>();
    RowResolver inputRR = m_relToHiveRR.get(srcRel);
    RowResolver outputRR = new RowResolver();
    RexNode rnd;
    RexNodeConverter converter = new RexNodeConverter(m_cluster,
        srcRel.getRowType(), m_relToHiveColNameOptiqPosMap.get(srcRel), 0,
        false);
    int srcRelRecordSz = srcRel.getRowType().getFieldCount();
    for (int i = 0; i < obASTExprLst.size(); i++) {
      // 2.1 Convert AST Expr to ExprNode
      obASTExpr = (ASTNode) obASTExprLst.get(i);
      Map<ASTNode, ExprNodeDesc> astToExprNDescMap = TypeCheckProcFactory
          .genExprNode(obASTExpr, new TypeCheckCtx(inputRR));
      ExprNodeDesc obExprNDesc = astToExprNDescMap.get((ASTNode) obASTExpr
          .getChild(0));
      if (obExprNDesc == null)
        throw new SemanticException("Invalid order by expression: "
            + obASTExpr.toString());
      // 2.2 Convert ExprNode to RexNode
      rnd = converter.convert(obExprNDesc);
      // 2.3 Determine the index of ob expr in child schema
      // NOTE: Optiq can not take compound exprs in OB without it being
      // present in the child (& hence we add a child Project Rel)
      if (rnd instanceof RexInputRef) {
        fieldIndex = ((RexInputRef) rnd).getIndex();
      } else {
        // Compound expression: append as a virtual column after the
        // original fields.
        fieldIndex = srcRelRecordSz + newVCLst.size();
        newVCLst.add(rnd);
        vcASTTypePairs.add(new Pair<ASTNode, TypeInfo>((ASTNode) obASTExpr
            .getChild(0), obExprNDesc.getTypeInfo()));
      }
      // 2.4 Determine the Direction of order by
      // Default is DESCENDING; the parser wraps each key in an
      // ASC/DESC token, so ASC is set explicitly below.
      org.eigenbase.rel.RelFieldCollation.Direction order = RelFieldCollation.Direction.DESCENDING;
      if (obASTExpr.getType() == HiveParser.TOK_TABSORTCOLNAMEASC) {
        order = RelFieldCollation.Direction.ASCENDING;
      }
      // 2.5 Add to field collations
      fieldCollations.add(new RelFieldCollation(fieldIndex, order));
    }
    // 3. Add Child Project Rel if needed
    RelNode obInputRel = srcRel;
    if (!newVCLst.isEmpty()) {
      List<RexNode> originalInputRefs = Lists.transform(srcRel.getRowType()
          .getFieldList(), new Function<RelDataTypeField, RexNode>() {
        public RexNode apply(RelDataTypeField input) {
          return new RexInputRef(input.getIndex(), input.getType());
        }
      });
      obInputRel = HiveProjectRel.create(srcRel,
          CompositeList.of(originalInputRefs, newVCLst), null);
    }
    // 4. Construct SortRel
    RelTraitSet traitSet = m_cluster.traitSetOf(HiveRel.CONVENTION);
    RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl
        .of(fieldCollations));
    // TODO: Is it better to introduce a
    // project on top to restrict VC from showing up in sortRel type
    RelNode sortRel = new HiveSortRel(m_cluster, traitSet, obInputRel,
        canonizedCollation, null, null);
    // 5. Construct OB Parent Rel If needed
    // Construct a parent Project if OB has virtual columns(vc) otherwise
    // vc would show up in the result
    // TODO: If OB is part of sub query & Parent Query select is not of the
    // type "select */.*..." then parent project is not needed
    relToRet = sortRel;
    if (!newVCLst.isEmpty()) {
      List<RexNode> obParentRelProjs = Lists.transform(srcRel
          .getRowType().getFieldList(),
          new Function<RelDataTypeField, RexNode>() {
            public RexNode apply(RelDataTypeField input) {
              return new RexInputRef(input.getIndex(), input.getType());
            }
          });
      relToRet = HiveProjectRel.create(sortRel, obParentRelProjs, null);
    }
    // 6. Construct output RR
    RowResolver.add(outputRR, inputRR, 0);
    // 7. Update the maps
    // NOTE: Output RR for SortRel is considered same as its input; we may
    // end up not using VC that is present in sort rel. Also note that
    // rowtype of sortrel is the type of it child; if child happens to be
    // synthetic project that we introduced then that projectrel would
    // contain the vc.
    ImmutableMap<String, Integer> hiveColNameOptiqPosMap = buildHiveToOptiqColumnMap(
        outputRR, relToRet);
    m_relToHiveRR.put(relToRet, outputRR);
    m_relToHiveColNameOptiqPosMap.put(relToRet, hiveColNameOptiqPosMap);
  }
  return relToRet;
}
/**
 * Builds the LIMIT plan as a HiveSortRel with an empty collation and a
 * fetch literal; schema is unchanged, so the input's row resolver is
 * cloned for the new rel.
 *
 * @return the limit rel, or null when the block has no LIMIT clause
 */
private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  QBParseInfo qbp = getQBParseInfo(qb);
  Integer limit = qbp.getDestToLimit().get(
      qbp.getClauseNames().iterator().next());
  if (limit == null) {
    return null;
  }

  RexNode fetch = m_cluster.getRexBuilder().makeExactLiteral(
      BigDecimal.valueOf(limit));
  RelTraitSet traitSet = m_cluster.traitSetOf(HiveRel.CONVENTION);
  RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl.EMPTY);
  HiveSortRel limitRel = new HiveSortRel(m_cluster, traitSet, srcRel,
      canonizedCollation, null, fetch);

  RowResolver outputRR = new RowResolver();
  RowResolver.add(outputRR, m_relToHiveRR.get(srcRel), 0);
  ImmutableMap<String, Integer> posMap = buildHiveToOptiqColumnMap(outputRR,
      limitRel);
  m_relToHiveRR.put(limitRel, outputRR);
  m_relToHiveColNameOptiqPosMap.put(limitRel, posMap);
  return limitRel;
}
/**
 * NOTE: there can only be one select clause since we don't handle multi
 * destination insert.
 *
 * Builds the SELECT-list plan as a HiveProjectRel: resolves each select
 * expression (star, regex column, table.column, or general expression),
 * converts it to a RexNode, and registers the resulting row resolver.
 * Bails out (RuntimeException) on TRANSFORM and UDTF selects, which the
 * CBO path does not support.
 *
 * @throws SemanticException
 */
private RelNode genSelectLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  boolean subQuery;
  ArrayList<ExprNodeDesc> col_list = new ArrayList<ExprNodeDesc>();
  // 1. Get Select Expression List
  QBParseInfo qbp = getQBParseInfo(qb);
  String selClauseName = qbp.getClauseNames().iterator().next();
  ASTNode selExprList = qbp.getSelForClause(selClauseName);
  // 2.Row resolvers for input, output
  RowResolver out_rwsch = new RowResolver();
  ASTNode trfm = null;
  Integer pos = Integer.valueOf(0);
  RowResolver inputRR = this.m_relToHiveRR.get(srcRel);
  // 3. Query Hints
  // TODO: Handle Query Hints; currently we ignore them
  boolean selectStar = false;
  int posn = 0;
  boolean hintPresent = (selExprList.getChild(0).getType() == HiveParser.TOK_HINTLIST);
  if (hintPresent) {
    posn++;
  }
  // 4. Determine if select corresponds to a subquery
  subQuery = qb.getParseInfo().getIsSubQ();
  // 4. Bailout if select involves Transform
  boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() == HiveParser.TOK_TRANSFORM);
  if (isInTransform) {
    throw new RuntimeException("SELECT TRANSFORM not supported");
  }
  // 5. Bailout if select involves UDTF
  ASTNode udtfExpr = (ASTNode) selExprList.getChild(posn).getChild(0);
  GenericUDTF genericUDTF = null;
  int udtfExprType = udtfExpr.getType();
  if (udtfExprType == HiveParser.TOK_FUNCTION
      || udtfExprType == HiveParser.TOK_FUNCTIONSTAR) {
    String funcName = TypeCheckProcFactory.DefaultExprProcessor
        .getFunctionText(udtfExpr, true);
    FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName);
    if (fi != null) {
      genericUDTF = fi.getGenericUDTF();
    }
    if (genericUDTF != null) {
      throw new RuntimeException("SELECT UDTF not supported");
    }
  }
  // 6. Iterate over all expression (after SELECT)
  ASTNode exprList = selExprList;
  int startPosn = posn;
  List<String> tabAliasesForAllProjs = getTabAliases(inputRR);
  for (int i = startPosn; i < exprList.getChildCount(); ++i) {
    // 6.1 child can be EXPR AS ALIAS, or EXPR.
    ASTNode child = (ASTNode) exprList.getChild(i);
    boolean hasAsClause = (!isInTransform) && (child.getChildCount() == 2);
    // 6.2 bail out if it is windowing spec
    boolean isWindowSpec = child.getChildCount() == 3 ? (child.getChild(2)
        .getType() == HiveParser.TOK_WINDOWSPEC) : false;
    if (isWindowSpec)
      throw new RuntimeException("Windowing is not supported yet");
    // 6.3 EXPR AS (ALIAS,...) parses, but is only allowed for UDTF's
    // This check is not needed and invalid when there is a transform b/c
    // the
    // AST's are slightly different.
    if (child.getChildCount() > 2) {
      throw new SemanticException(generateErrorMessage(
          (ASTNode) child.getChild(2), ErrorMsg.INVALID_AS.getMsg()));
    }
    ASTNode expr;
    String tabAlias;
    String colAlias;
    // 6.4 Get rid of TOK_SELEXPR
    expr = (ASTNode) child.getChild(0);
    String[] colRef = getColAlias(child, autogenColAliasPrfxLbl, inputRR,
        autogenColAliasPrfxIncludeFuncName, i);
    tabAlias = colRef[0];
    colAlias = colRef[1];
    // 6.5 Build ExprNode corresponding to columns
    if (expr.getType() == HiveParser.TOK_ALLCOLREF) {
      // "*" or "tab.*": expand to all (qualifying) input columns.
      pos = genColListRegex(".*", expr.getChildCount() == 0 ? null
          : getUnescapedName((ASTNode) expr.getChild(0)).toLowerCase(),
          expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
          subQuery);
      selectStar = true;
    } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL
        && !hasAsClause && !inputRR.getIsExprResolver()
        && isRegex(unescapeIdentifier(expr.getChild(0).getText()), conf)) {
      // In case the expression is a regex COL.
      // This can only happen without AS clause
      // We don't allow this for ExprResolver - the Group By case
      pos = genColListRegex(unescapeIdentifier(expr.getChild(0).getText()),
          null, expr, col_list, inputRR, pos, out_rwsch, tabAliasesForAllProjs,
          subQuery);
    } else if (expr.getType() == HiveParser.DOT
        && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
        && inputRR.hasTableAlias(unescapeIdentifier(expr.getChild(0)
            .getChild(0).getText().toLowerCase())) && !hasAsClause
        && !inputRR.getIsExprResolver()
        && isRegex(unescapeIdentifier(expr.getChild(1).getText()), conf)) {
      // In case the expression is TABLE.COL (col can be regex).
      // This can only happen without AS clause
      // We don't allow this for ExprResolver - the Group By case
      pos = genColListRegex(unescapeIdentifier(expr.getChild(1).getText()),
          unescapeIdentifier(expr.getChild(0).getChild(0).getText()
              .toLowerCase()), expr, col_list, inputRR, pos, out_rwsch,
          tabAliasesForAllProjs, subQuery);
    } else {
      // Case when this is an expression
      TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR);
      // We allow stateful functions in the SELECT list (but nowhere else)
      tcCtx.setAllowStatefulFunctions(true);
      ExprNodeDesc exp = genExprNodeDesc(expr, inputRR, tcCtx);
      String recommended = recommendName(exp, colAlias);
      if (recommended != null && out_rwsch.get(null, recommended) == null) {
        colAlias = recommended;
      }
      col_list.add(exp);
      if (subQuery) {
        out_rwsch.checkColumn(tabAlias, colAlias);
      }
      ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(pos),
          exp.getWritableObjectInspector(), tabAlias, false);
      colInfo
          .setSkewedCol((exp instanceof ExprNodeColumnDesc) ? ((ExprNodeColumnDesc) exp)
              .isSkewedCol() : false);
      out_rwsch.put(tabAlias, colAlias, colInfo);
      if (exp instanceof ExprNodeColumnDesc) {
        // Propagate alternate (tab, col) mappings for plain column refs.
        ExprNodeColumnDesc colExp = (ExprNodeColumnDesc) exp;
        String[] altMapping = inputRR.getAlternateMappings(colExp
            .getColumn());
        if (altMapping != null) {
          out_rwsch.put(altMapping[0], altMapping[1], colInfo);
        }
      }
      pos = Integer.valueOf(pos.intValue() + 1);
    }
  }
  selectStar = selectStar && exprList.getChildCount() == posn + 1;
  // 7. Replace NULL with CAST(NULL AS STRING)
  ArrayList<String> columnNames = new ArrayList<String>();
  for (int i = 0; i < col_list.size(); i++) {
    // Replace NULL with CAST(NULL AS STRING)
    if (col_list.get(i) instanceof ExprNodeNullDesc) {
      col_list.set(i, new ExprNodeConstantDesc(
          TypeInfoFactory.stringTypeInfo, null));
    }
    columnNames.add(getColumnInternalName(i));
  }
  // 8. Convert Hive projections to Optiq
  List<RexNode> optiqColLst = new ArrayList<RexNode>();
  RexNodeConverter rexNodeConv = new RexNodeConverter(m_cluster,
      srcRel.getRowType(),
      buildHiveColNameToInputPosMap(col_list, inputRR), 0, false);
  for (ExprNodeDesc colExpr : col_list) {
    optiqColLst.add(rexNodeConv.convert(colExpr));
  }
  // 9. Construct Hive Project Rel
  // 9.1. Prepend column names with '_o_'
  /*
   * Hive treats names that start with '_c' as internalNames; so change the
   * names so we don't run into this issue when converting back to Hive AST.
   */
  List<String> oFieldNames = Lists.transform(columnNames,
      new Function<String, String>() {
        public String apply(String hName) {
          return "_o_" + hName;
        }
      });
  // 9.2 Build Optiq Rel Node for project using converted projections & col
  // names
  HiveRel selRel = HiveProjectRel.create(srcRel, optiqColLst, oFieldNames);
  // 10. Keep track of colname-to-posmap && RR for new select
  this.m_relToHiveColNameOptiqPosMap.put(selRel,
      buildHiveToOptiqColumnMap(out_rwsch, selRel));
  this.m_relToHiveRR.put(selRel, out_rwsch);
  return selRel;
}
/**
 * Dispatches plan generation for a QBExpr: a plain query block is planned
 * directly, a UNION plans both operands and combines them; any other
 * opcode yields null.
 */
private RelNode genLogicalPlan(QBExpr qbexpr) throws SemanticException {
  switch (qbexpr.getOpcode()) {
  case NULLOP:
    return genLogicalPlan(qbexpr.getQB());
  case UNION: {
    RelNode leftRel = genLogicalPlan(qbexpr.getQBExpr1());
    RelNode rightRel = genLogicalPlan(qbexpr.getQBExpr2());
    return genUnionLogicalPlan(qbexpr.getAlias(), qbexpr.getQBExpr1()
        .getAlias(), leftRel, qbexpr.getQBExpr2().getAlias(), rightRel);
  }
  default:
    return null;
  }
}
/**
 * Builds the full logical plan for a query block by stacking the clause
 * plans in order: source (table scan / subquery / join), WHERE, GROUP BY,
 * HAVING, SELECT, subquery-alias RR rewrite, ORDER BY, LIMIT. Each stage
 * returns null when its clause is absent, in which case the previous rel
 * is carried forward.
 */
private RelNode genLogicalPlan(QB qb) throws SemanticException {
  RelNode srcRel = null;
  RelNode filterRel = null;
  RelNode gbRel = null;
  RelNode gbHavingRel = null;
  RelNode havingRel = null;
  RelNode selectRel = null;
  RelNode obRel = null;
  RelNode limitRel = null;
  RelNode rootRel = null;
  // First generate all the opInfos for the elements in the from clause
  Map<String, RelNode> aliasToRel = new HashMap<String, RelNode>();
  // 1. Build Rel For Src (SubQuery, TS, Join)
  // 1.1. Recurse over the subqueries to fill the subquery part of the plan
  for (String subqAlias : qb.getSubqAliases()) {
    QBExpr qbexpr = qb.getSubqForAlias(subqAlias);
    aliasToRel.put(subqAlias, genLogicalPlan(qbexpr));
    qbexpr.setAlias(subqAlias);
  }
  // 1.2 Recurse over all the source tables
  for (String tableAlias : qb.getTabAliases()) {
    RelNode op = genTableLogicalPlan(tableAlias, qb);
    aliasToRel.put(tableAlias, op);
  }
  // 1.3 process join
  if (qb.getParseInfo().getJoinExpr() != null) {
    srcRel = genJoinLogicalPlan(qb.getParseInfo().getJoinExpr(), aliasToRel);
  } else {
    // If no join then there should only be either 1 TS or 1 SubQuery
    srcRel = aliasToRel.values().iterator().next();
  }
  // 2. Build Rel for where Clause
  filterRel = genFilterLogicalPlan(qb, srcRel);
  srcRel = (filterRel == null) ? srcRel : filterRel;
  // 3. Build Rel for GB Clause
  gbRel = genGBLogicalPlan(qb, srcRel);
  srcRel = (gbRel == null) ? srcRel : gbRel;
  // 4. Build Rel for GB Having Clause
  gbHavingRel = genGBHavingLogicalPlan(qb, srcRel);
  srcRel = (gbHavingRel == null) ? srcRel : gbHavingRel;
  // 5. Build Rel for Select Clause
  selectRel = genSelectLogicalPlan(qb, srcRel);
  srcRel = (selectRel == null) ? srcRel : selectRel;
  // 6. Incase this QB corresponds to subquery then modify its RR to point
  // to subquery alias
  // TODO: cleanup this
  if (qb.getParseInfo().getAlias() != null) {
    // Re-key every column under the subquery alias so the outer query can
    // resolve "alias.col" references.
    RowResolver rr = this.m_relToHiveRR.get(srcRel);
    RowResolver newRR = new RowResolver();
    String alias = qb.getParseInfo().getAlias();
    for (ColumnInfo colInfo : rr.getColumnInfos()) {
      String name = colInfo.getInternalName();
      String[] tmp = rr.reverseLookup(name);
      if ("".equals(tmp[0]) || tmp[1] == null) {
        // ast expression is not a valid column name for table
        tmp[1] = colInfo.getInternalName();
      }
      ColumnInfo newCi = new ColumnInfo(colInfo);
      newCi.setTabAlias(alias);
      newRR.put(alias, tmp[1], newCi);
    }
    m_relToHiveRR.put(srcRel, newRR);
    m_relToHiveColNameOptiqPosMap.put(srcRel,
        buildHiveToOptiqColumnMap(newRR, srcRel));
  }
  // 7. Build Rel for OB Clause
  obRel = genOBLogicalPlan(qb, srcRel);
  srcRel = (obRel == null) ? srcRel : obRel;
  // 8. Build Rel for Limit Clause
  limitRel = genLimitLogicalPlan(qb, srcRel);
  srcRel = (limitRel == null) ? srcRel : limitRel;
  if (LOG.isDebugEnabled()) {
    LOG.debug("Created Plan for Query Block " + qb.getId());
  }
  return srcRel;
}
/**
 * Builds the HAVING filter rel for the query block.
 *
 * @return the filter rel over srcRel, or null when no HAVING clause exists
 */
private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel)
    throws SemanticException {
  QBParseInfo qbp = getQBParseInfo(qb);
  String dest = qbp.getClauseNames().iterator().next();
  ASTNode havingClause = qbp.getHavingForClause(dest);
  if (havingClause == null) {
    return null;
  }
  return genFilterRelNode((ASTNode) havingClause.getChild(0), srcRel);
}
/**
 * Maps every column's Hive internal name to its position in the row
 * resolver. The rel node parameter is currently unused but kept for
 * interface stability. (Removed an unused loop counter.)
 */
private ImmutableMap<String, Integer> buildHiveToOptiqColumnMap(
    RowResolver rr, RelNode rNode) {
  ImmutableMap.Builder<String, Integer> b = new ImmutableMap.Builder<String, Integer>();
  for (ColumnInfo ci : rr.getRowSchema().getSignature()) {
    b.put(ci.getInternalName(), rr.getPosition(ci.getInternalName()));
  }
  return b.build();
}
/**
 * Builds a map of Hive column names (ExprNodeColumnDesc names) referenced
 * by the projection list to the positions of those columns in the input
 * row resolver.
 */
private ImmutableMap<String, Integer> buildHiveColNameToInputPosMap(
    List<ExprNodeDesc> col_list, RowResolver inputRR) {
  // Collect every column descriptor referenced anywhere in col_list.
  Map<Integer, ExprNodeDesc> columnDescsByHash = new HashMap<Integer, ExprNodeDesc>();
  ExprNodeDescUtils.getExprNodeColumnDesc(col_list, columnDescsByHash);

  ImmutableMap.Builder<String, Integer> posMapBuilder = new ImmutableMap.Builder<String, Integer>();
  for (ExprNodeDesc columnDesc : columnDescsByHash.values()) {
    String hiveColName = ((ExprNodeColumnDesc) columnDesc).getColumn();
    posMapBuilder.put(hiveColName, inputRR.getPosition(hiveColName));
  }
  return posMapBuilder.build();
}
/**
 * Returns the query block's parse info, rejecting multi-insert queries
 * (more than one destination clause), which the CBO path does not handle.
 */
private QBParseInfo getQBParseInfo(QB qb) {
  QBParseInfo parseInfo = qb.getParseInfo();
  if (parseInfo.getClauseNames().size() > 1) {
    throw new RuntimeException("Multi Insert is not supported");
  }
  return parseInfo;
}
/**
 * Collects the table alias of each column in the row resolver, in column
 * order (duplicates preserved).
 */
private List<String> getTabAliases(RowResolver inputRR) {
  List<String> aliases = new ArrayList<String>();
  for (ColumnInfo columnInfo : inputRR.getColumnInfos()) {
    aliases.add(columnInfo.getTabAlias());
  }
  return aliases;
}
}
}
| HIVE-7310: Turning CBO on results in NPE on some queries (Laljo John Pullokkaran via Gunther Hagleitner)
git-svn-id: d4dbdacc005fc251eace6736a786023c8d74470d@1606276 13f79535-47bb-0310-9956-ffa450edef68
| ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java | HIVE-7310: Turning CBO on results in NPE on some queries (Laljo John Pullokkaran via Gunther Hagleitner) |
|
Java | bsd-2-clause | 2063f58625791451b4ae076c0d9be31044f63067 | 0 | schnej7/Blue-Mesh,schnej7/Blue-Mesh | package blue.mesh;
import java.util.UUID;
import android.bluetooth.BluetoothAdapter;
import android.util.Log;
/**
 * Facade for the BlueMesh Bluetooth mesh network: owns the adapter, the
 * message router, and the server/client threads. Construct, then call
 * {@link #launch()} to start networking and {@link #disconnect()} to stop.
 */
public class BlueMeshService {

    private BluetoothAdapter adapter;
    private RouterObject router;
    private ServerThread serverThread;
    private ClientThread clientThread;
    private static final String TAG = "BlueMesh Service";

    // Poll interval (ms) while waiting for the adapter to change state.
    // Replaces the previous busy-wait loops, which pegged a CPU core.
    private static final long ADAPTER_POLL_MS = 50;

    /**
     * Acquires the default Bluetooth adapter, power-cycles it so the stack
     * starts from a known state, and constructs the router plus the server
     * and client threads. Threads are not started until launch().
     *
     * NOTE(review): this blocks the calling thread while Bluetooth
     * restarts — confirm it is not invoked on the Android UI thread.
     *
     * @param a_uuid service UUID shared by all devices on the mesh
     * @throws NullPointerException if the device has no Bluetooth adapter
     */
    public BlueMeshService(UUID a_uuid) throws NullPointerException {

        // Gets bluetooth hardware from phone and makes sure that it is
        // non-null;
        adapter = BluetoothAdapter.getDefaultAdapter();

        // If bluetooth hardware does not exist...
        if (adapter == null) {
            if (Constants.DEBUG)
                Log.d(TAG, "BluetoothAdapter is null");
            throw new NullPointerException("BluetoothAdapter is null");
        } else {
            if (Constants.DEBUG)
                Log.d(TAG, "BluetoothAdapter is is non-null");
        }

        // Try to restart bluetooth: disable (if enabled), wait, re-enable.
        if (adapter.isEnabled()) {
            Log.d(TAG, "disable");
            if (adapter.disable()) {
                Log.d(TAG, "waiting...");
                waitForAdapterState(false);
            } else {
                Log.d(TAG, "failed");
            }
        }
        Log.d(TAG, "enable");
        if (adapter.enable()) {
            Log.d(TAG, "waiting...");
            waitForAdapterState(true);
        } else {
            Log.d(TAG, "failed");
        }

        // Create a new router object
        router = new RouterObject();
        if (Constants.DEBUG)
            Log.d(TAG, "Router Object Created");

        // Try to create a new ServerThread
        try {
            serverThread = new ServerThread(adapter, router, a_uuid);
        } catch (NullPointerException e) {
            throw e;
        }
        if (Constants.DEBUG)
            Log.d(TAG, "Sever Thread Created");

        // Create a new clientThread
        clientThread = new ClientThread(adapter, router, a_uuid);
        if (Constants.DEBUG)
            Log.d(TAG, "Client Thread Created");
    }

    // Sleeps in short intervals until the adapter reports the requested
    // enabled state; stops early (preserving interrupt status) if the
    // waiting thread is interrupted.
    private void waitForAdapterState(boolean enabled) {
        while (adapter.isEnabled() != enabled) {
            try {
                Thread.sleep(ADAPTER_POLL_MS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    // TODO: Implement later if needed
    public int config() {
        return Constants.SUCCESS;
    }

    // Starts the server and client threads if they are not already running.
    public int launch() {
        // TODO: Conditionals are untested
        if (!serverThread.isAlive()) {
            serverThread.start();
        }
        if (!clientThread.isAlive()) {
            clientThread.start();
        }
        return Constants.SUCCESS;
    }

    // function that writes message to devices
    public int write(byte[] buffer) {
        router.write(buffer, Constants.BYTE_LEVEL_USER);
        return Constants.SUCCESS;
    }

    // function to grab most recent message off of message queue
    // (message stack actually a linked list but is used like a queue)
    public byte[] pull() {
        return router.getNextMessage();
    }

    // TODO: This does not work
    public int getNumberOfDevicesOnNetwork() {
        return router.getNumberOfDevicesOnNetwork();
    }

    // Returns the Bluetooth name of the device
    public String getMyDeviceName() {
        return adapter.getName();
    }

    // Kills threads and stops all communications; safe to call more than
    // once (fields are nulled after teardown).
    public int disconnect() {
        Log.d(TAG, "kill start");
        // TODO: check if conditionals fixes bug
        // disconnecting when bluetooth not enabeled
        if (this.clientThread != null) {
            this.clientThread.kill();
            this.clientThread = null;
        }
        if (this.serverThread != null) {
            this.serverThread.kill();
            this.serverThread = null;
        }
        if (this.router != null) {
            this.router.stop();
            this.router = null;
        }
        Log.d(TAG, "kill success");
        return Constants.SUCCESS;
    }
}
| BlueMesh/src/blue/mesh/BlueMeshService.java | package blue.mesh;
import java.util.UUID;
import android.bluetooth.BluetoothAdapter;
import android.util.Log;
// NOTE(review): pre-change snapshot of BlueMeshService -- no Bluetooth
// power-cycle in the constructor, and disconnect() does not null its fields.
// Kept byte-identical; see the updated version for the revised lifecycle.
public class BlueMeshService {
// Bluetooth radio handle obtained from the platform.
private BluetoothAdapter adapter;
// Mediates message passing between the server and client threads.
private RouterObject router;
private ServerThread serverThread;
private ClientThread clientThread;
private static final String TAG = "BlueMesh Service";
// BMS constructor
public BlueMeshService(UUID a_uuid) throws NullPointerException {
// Gets bluetooth hardware from phone and makes sure that it is
// non-null;
adapter = BluetoothAdapter.getDefaultAdapter();
// If bluetooth hardware does not exist...
if (adapter == null) {
if (Constants.DEBUG)
Log.d(TAG, "BluetoothAdapter is null");
throw new NullPointerException("BluetoothAdapter is null");
} else {
if (Constants.DEBUG)
Log.d(TAG, "BluetoothAdapter is is non-null");
}
// Create a new router object
router = new RouterObject();
if (Constants.DEBUG)
Log.d(TAG, "Router Object Created");
// Try to create a new ServerThread
try {
serverThread = new ServerThread(adapter, router, a_uuid);
} catch (NullPointerException e) {
throw e;
}
if (Constants.DEBUG)
Log.d(TAG, "Sever Thread Created");
// Create a new clientThread
clientThread = new ClientThread(adapter, router, a_uuid);
if (Constants.DEBUG)
Log.d(TAG, "Client Thread Created");
}
// TODO: Implement later if needed
public int config() {
return Constants.SUCCESS;
}
// Starts the server and client threads if not already running.
public int launch() {
// TODO: Conditionals are untested
if (!serverThread.isAlive()) {
serverThread.start();
}
if (!clientThread.isAlive()) {
clientThread.start();
}
return Constants.SUCCESS;
}
// function that writes message to devices
public int write(byte[] buffer) {
router.write(buffer, Constants.BYTE_LEVEL_USER);
return Constants.SUCCESS;
}
// function to grab most recent message off of message queue
// (message stack actually a linked list but is used like a queue)
public byte[] pull() {
return router.getNextMessage();
}
// TODO: This does not work
public int getNumberOfDevicesOnNetwork() {
return router.getNumberOfDevicesOnNetwork();
}
// Returns the Bluetooth name of the device
public String getMyDeviceName() {
return adapter.getName();
}
// Kills threads and stops all communications
public int disconnect() {
Log.d(TAG, "kill start");
// TODO: check if conditionals fixes bug
// disconnecting when bluetooth not enabled
// NOTE(review): fields are not nulled here, so a second disconnect()
// would call kill()/stop() on already-stopped objects.
if (this.clientThread != null) {
this.clientThread.kill();
}
if (this.serverThread != null) {
this.serverThread.kill();
}
if (this.router != null) {
this.router.stop();
}
Log.d(TAG, "kill success");
return Constants.SUCCESS;
}
}
| Attempt to restart Bluetooth when starting BlueMeshService
| BlueMesh/src/blue/mesh/BlueMeshService.java | Attempt to restart Bluetooth when starting BlueMeshService |
|
Java | bsd-3-clause | 16201473f1b3736fcd9c60a8f723fbad368dd9a2 | 0 | rsmudge/armitage,rsmudge/armitage,rsmudge/armitage | package ui;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import javax.swing.SwingUtilities;
/**
 * Application-wide keyboard-shortcut dispatcher. Install an instance via
 * KeyboardFocusManager.addKeyEventDispatcher and register handlers against
 * human-readable descriptions such as "Ctrl+Shift+F1".
 */
public class KeyBindings implements KeyEventDispatcher {

    // Maps a shortcut description (e.g. "Ctrl+N") to its handler.
    // Generified from a raw Map; guarded by synchronized(this).
    protected Map<String, KeyHandler> bindings = new HashMap<String, KeyHandler>();

    /** Defers handler execution to the EDT via SwingUtilities.invokeLater. */
    private static class ExecuteBinding implements Runnable {
        protected String binding;
        protected KeyHandler handler;

        public ExecuteBinding(String b, KeyHandler h) {
            binding = b;
            handler = h;
        }

        public void run() {
            handler.key_pressed(binding);
        }
    }

    public KeyBindings() {
    }

    /** Registers (or replaces) the handler for the given shortcut description. */
    public void bind(String description, KeyHandler handler) {
        synchronized (this) {
            bindings.put(description, handler);
        }
    }

    /**
     * Consumes every event (pressed/released/typed) that belongs to a bound
     * shortcut so it never reaches the focused component, but fires the
     * handler only on KEY_PRESSED.
     *
     * @return true when the event triggered a handler, false otherwise.
     */
    public boolean dispatchKeyEvent(KeyEvent ev) {
        StringBuffer description = new StringBuffer();
        if (ev.getModifiers() != 0) {
            description.append(getKeyModifiers(ev));
        }
        description.append(getKeyText(ev));

        synchronized (this) {
            if (bindings.containsKey(description.toString())) {
                // Swallow all phases of a bound shortcut...
                ev.consume();
                if (ev.getID() != KeyEvent.KEY_PRESSED) {
                    return false;
                }
                else {
                    // ...but dispatch only on the initial key press.
                    SwingUtilities.invokeLater(new ExecuteBinding(description.toString(), bindings.get(description.toString())));
                    return true;
                }
            }
        }
        return false;
    }

    /** Builds the "Shift+Ctrl+Alt+Meta+" prefix for the event's modifiers. */
    private static String getKeyModifiers(KeyEvent ev) {
        StringBuffer modifiers = new StringBuffer();
        if (ev.isShiftDown())
            modifiers.append("Shift+");
        if (ev.isControlDown())
            modifiers.append("Ctrl+");
        if (ev.isAltDown())
            modifiers.append("Alt+");
        if (ev.isMetaDown())
            modifiers.append("Meta+");
        return modifiers.toString();
    }

    /** Maps the event's key code to the canonical name used in descriptions. */
    private static String getKeyText(KeyEvent ev) {
        switch (ev.getKeyCode()) {
        case KeyEvent.VK_ACCEPT:
            return "Accept";
        case KeyEvent.VK_BACK_QUOTE:
            return "Back_Quote";
        case KeyEvent.VK_BACK_SPACE:
            return "Backspace";
        case KeyEvent.VK_CAPS_LOCK:
            return "Caps_Lock";
        case KeyEvent.VK_CLEAR:
            return "Clear";
        case KeyEvent.VK_CONVERT:
            return "Convert";
        case KeyEvent.VK_DELETE:
            return "Delete";
        case KeyEvent.VK_DOWN:
            return "Down";
        case KeyEvent.VK_END:
            return "End";
        case KeyEvent.VK_ENTER:
            return "Enter";
        case KeyEvent.VK_ESCAPE:
            return "Escape";
        case KeyEvent.VK_F1:
            return "F1";
        case KeyEvent.VK_F2:
            return "F2";
        case KeyEvent.VK_F3:
            return "F3";
        case KeyEvent.VK_F4:
            return "F4";
        case KeyEvent.VK_F5:
            return "F5";
        case KeyEvent.VK_F6:
            return "F6";
        case KeyEvent.VK_F7:
            return "F7";
        case KeyEvent.VK_F8:
            return "F8";
        case KeyEvent.VK_F9:
            return "F9";
        case KeyEvent.VK_F10:
            return "F10";
        case KeyEvent.VK_F11:
            return "F11";
        case KeyEvent.VK_F12:
            return "F12";
        case KeyEvent.VK_FINAL:
            return "Final";
        case KeyEvent.VK_HELP:
            return "Help";
        case KeyEvent.VK_HOME:
            return "Home";
        case KeyEvent.VK_INSERT:
            return "Insert";
        case KeyEvent.VK_LEFT:
            return "Left";
        case KeyEvent.VK_NUM_LOCK:
            return "Num_Lock";
        case KeyEvent.VK_MULTIPLY:
            return "NumPad_*";
        case KeyEvent.VK_PLUS:
            return "NumPad_+";
        case KeyEvent.VK_COMMA:
            return "NumPad_,";
        case KeyEvent.VK_SUBTRACT:
            return "NumPad_-";
        case KeyEvent.VK_PERIOD:
            return "Period";
        case KeyEvent.VK_SLASH:
            return "NumPad_/";
        case KeyEvent.VK_PAGE_DOWN:
            return "Page_Down";
        case KeyEvent.VK_PAGE_UP:
            return "Page_Up";
        case KeyEvent.VK_PAUSE:
            return "Pause";
        case KeyEvent.VK_PRINTSCREEN:
            return "Print_Screen";
        case KeyEvent.VK_QUOTE:
            return "Quote";
        case KeyEvent.VK_RIGHT:
            return "Right";
        case KeyEvent.VK_SCROLL_LOCK:
            return "Scroll_Lock";
        case KeyEvent.VK_SPACE:
            return "Space";
        case KeyEvent.VK_TAB:
            return "Tab";
        case KeyEvent.VK_UP:
            return "Up";
        default:
            // Fixed: getKeyText is static; call it via the class rather than
            // through the event instance.
            return KeyEvent.getKeyText(ev.getKeyCode());
        }
    }
}
| src/ui/KeyBindings.java | package ui;
import java.awt.*;
import java.awt.event.*;
import java.util.*;
import javax.swing.SwingUtilities;
public class KeyBindings implements KeyEventDispatcher {
protected Map bindings = new HashMap();
private static class ExecuteBinding implements Runnable {
protected String binding;
protected KeyHandler handler;
public ExecuteBinding(String b, KeyHandler h) {
binding = b;
handler = h;
}
public void run() {
handler.key_pressed(binding);
}
}
public KeyBindings() {
}
public void bind(String description, KeyHandler handler) {
synchronized (this) {
bindings.put(description, handler);
}
}
public boolean dispatchKeyEvent(KeyEvent ev) {
if (ev.getID() != KeyEvent.KEY_PRESSED) {
return false;
}
StringBuffer description = new StringBuffer();
if (ev.getModifiers() != 0) {
description.append(getKeyModifiers(ev));
}
description.append(getKeyText(ev));
synchronized (this) {
if (bindings.containsKey(description.toString())) {
SwingUtilities.invokeLater(new ExecuteBinding(description.toString(), (KeyHandler)bindings.get(description.toString())));
ev.consume();
return true;
}
}
return false;
}
private static String getKeyModifiers(KeyEvent ev) {
StringBuffer modifiers = new StringBuffer();
if (ev.isShiftDown())
modifiers.append("Shift+");
if (ev.isControlDown())
modifiers.append("Ctrl+");
if (ev.isAltDown())
modifiers.append("Alt+");
if (ev.isMetaDown())
modifiers.append("Meta+");
return modifiers.toString();
}
private static String getKeyText(KeyEvent ev) {
switch (ev.getKeyCode()) {
case KeyEvent.VK_ACCEPT:
return "Accept";
case KeyEvent.VK_BACK_QUOTE:
return "Back_Quote";
case KeyEvent.VK_BACK_SPACE:
return "Backspace";
case KeyEvent.VK_CAPS_LOCK:
return "Caps_Lock";
case KeyEvent.VK_CLEAR:
return "Clear";
case KeyEvent.VK_CONVERT:
return "Convert";
case KeyEvent.VK_DELETE:
return "Delete";
case KeyEvent.VK_DOWN:
return "Down";
case KeyEvent.VK_END:
return "End";
case KeyEvent.VK_ENTER:
return "Enter";
case KeyEvent.VK_ESCAPE:
return "Escape";
case KeyEvent.VK_F1:
return "F1";
case KeyEvent.VK_F2:
return "F2";
case KeyEvent.VK_F3:
return "F3";
case KeyEvent.VK_F4:
return "F4";
case KeyEvent.VK_F5:
return "F5";
case KeyEvent.VK_F6:
return "F6";
case KeyEvent.VK_F7:
return "F7";
case KeyEvent.VK_F8:
return "F8";
case KeyEvent.VK_F9:
return "F9";
case KeyEvent.VK_F10:
return "F10";
case KeyEvent.VK_F11:
return "F11";
case KeyEvent.VK_F12:
return "F12";
case KeyEvent.VK_FINAL:
return "Final";
case KeyEvent.VK_HELP:
return "Help";
case KeyEvent.VK_HOME:
return "Home";
case KeyEvent.VK_INSERT:
return "Insert";
case KeyEvent.VK_LEFT:
return "Left";
case KeyEvent.VK_NUM_LOCK:
return "Num_Lock";
case KeyEvent.VK_MULTIPLY:
return "NumPad_*";
case KeyEvent.VK_PLUS:
return "NumPad_+";
case KeyEvent.VK_COMMA:
return "NumPad_,";
case KeyEvent.VK_SUBTRACT:
return "NumPad_-";
case KeyEvent.VK_PERIOD:
return "Period";
case KeyEvent.VK_SLASH:
return "NumPad_/";
case KeyEvent.VK_PAGE_DOWN:
return "Page_Down";
case KeyEvent.VK_PAGE_UP:
return "Page_Up";
case KeyEvent.VK_PAUSE:
return "Pause";
case KeyEvent.VK_PRINTSCREEN:
return "Print_Screen";
case KeyEvent.VK_QUOTE:
return "Quote";
case KeyEvent.VK_RIGHT:
return "Right";
case KeyEvent.VK_SCROLL_LOCK:
return "Scroll_Lock";
case KeyEvent.VK_SPACE:
return "Space";
case KeyEvent.VK_TAB:
return "Tab";
case KeyEvent.VK_UP:
return "Up";
default:
return ev.getKeyText(ev.getKeyCode());
}
}
}
| change how events are swallowed by shortcut bindings.
| src/ui/KeyBindings.java | change how events are swallowed by shortcut bindings. |
|
Java | apache-2.0 | 9edc8cb7bd6243ff6a97492b87bc89ba418959f7 | 0 | MaxRau/CoffeeMud,oriontribunal/CoffeeMud,MaxRau/CoffeeMud,MaxRau/CoffeeMud,oriontribunal/CoffeeMud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,sfunk1x/CoffeeMud,oriontribunal/CoffeeMud,Tycheo/coffeemud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,Tycheo/coffeemud,bozimmerman/CoffeeMud,oriontribunal/CoffeeMud,Tycheo/coffeemud,bozimmerman/CoffeeMud,sfunk1x/CoffeeMud,MaxRau/CoffeeMud,Tycheo/coffeemud | com/planet_ink/coffee_mud/core/Dice.java | package com.planet_ink.coffee_mud.core;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2005 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Static dice-rolling utilities plus the packed "HP code" encoding used to
 * store a MOB's hit-point dice spec (roll/die/plus) in a single int.
 */
public class Dice
{
    // Shared RNG; lazily (re)seeded from the wall clock.
    private static Random randomizer = null;

    /** (Re)seeds the shared random number generator. */
    public static void seed()
    {
        randomizer = new Random(System.currentTimeMillis());
    }

    /** @return true if a d100 roll comes in under the score clamped to 5..95. */
    public static boolean normalizeAndRollLess(int score)
    {
        return (rollPercentage() < normalizeBy5(score));
    }

    /** Clamps a percentage score into the 5..95 range. */
    public static int normalizeBy5(int score)
    {
        if (score > 95)
            return 95;
        else
        if (score < 5)
            return 5;
        return score;
    }

    /**
     * Rolls hit points for the given level from an encoded dice spec.
     * A negative code carries a negative "plus" modifier. Codes below 32768
     * use the legacy level-based formula; larger codes pack roll/die/plus as
     * produced by {@link #getHPCode(int, int, int)}.
     */
    public static int rollHP(int level, int code)
    {
        int mul = 1;
        if (code < 0)
        {
            code = code * -1;
            mul = -1;
        }
        // old style: code is just the die size, rolled once per level
        if (code < 32768) return 10
            + (int) Math.round(Util.mul(level * level, 0.85))
            + (Dice.roll(level, code, 0) * mul);
        // new style: roll in bits 23+, die in bits 15-22, plus in bits 0-14
        int r = code >> 23;
        int d = (code - (r << 23)) >> 15;
        int p = (((code - (r << 23)) - (d << 15))) * mul;
        return Dice.roll(r, d, p);
    }

    /**
     * Parses a "XdY+Z" / "XdY-Z" dice string into a packed HP code.
     * Returns 11 (a legacy die size) when no 'd' separator is present.
     */
    public static int getHPCode(String str)
    {
        int i = str.indexOf("d");
        if (i < 0) return 11;
        int roll = Util.s_int(str.substring(0, i).trim());
        str = str.substring(i + 1).trim();
        i = str.indexOf("+");
        int dice = 0;
        int plus = 0;
        if (i < 0)
        {
            i = str.indexOf("-");
            if (i < 0)
                dice = Util.s_int(str.trim());
            else
            {
                dice = Util.s_int(str.substring(0, i).trim());
                plus = Util.s_int(str.substring(i));
            }
        }
        else
        {
            dice = Util.s_int(str.substring(0, i).trim());
            plus = Util.s_int(str.substring(i + 1));
        }
        return getHPCode(roll, dice, plus);
    }

    /**
     * Packs (roll, dice, plus) into one int: roll in bits 23+, dice in bits
     * 15-22, |plus| in bits 0-14, with the sign of plus applied to the whole
     * code. Oversized roll/dice counts are folded into the plus modifier.
     */
    public static int getHPCode(int roll, int dice, int plus)
    {
        if (roll <= 0) roll = 1;
        if (dice <= 0) dice = 0;
        if (roll > 255)
        {
            int diff = roll - 255;
            roll = 255;
            plus += (diff * dice) / 2;
        }
        if (dice > 255)
        {
            int diff = dice - 255;
            dice = 255;
            plus += (diff * roll) / 2;
        }
        int mul = 1;
        if (plus < 0)
        {
            plus = plus * -1;
            mul = -1;
        }
        // "plus" occupies 15 bits, so its magnitude must fit in 0..32767.
        // The old cap of 32768 overflowed into the dice field (1 << 15).
        if (plus > 32767) plus = 32767;
        return (plus + (dice << 15) + (roll << (23))) * mul;
    }

    /**
     * Decodes an HP code into {roll, dice, plus}. For legacy codes the triple
     * is {level, dieSize, levelBonus}.
     */
    public static int[] getHPBreakup(int level, int code)
    {
        int mul = 1;
        if (code < 0)
        {
            code = code * -1;
            mul = -1;
        }
        int stuff[] = new int[3];
        // old style
        if (code < 32768)
        {
            stuff[0] = level;
            stuff[1] = (code * mul);
            stuff[2] = (int) Math.round(Util.mul(level * level, 0.85));
        }
        else
        {
            // new style
            int r = code >> 23;
            int d = (code - (r << 23)) >> 15;
            int p = (((code - (r << 23)) - (d << 15))) * mul;
            stuff[0] = r;
            stuff[1] = d;
            stuff[2] = p;
        }
        return stuff;
    }

    /**
     * Rolls {@code number} dice of size {@code die} and adds {@code modifier}.
     * Each die is uniform in 1..die.
     */
    public static int roll(int number, int die, int modifier)
    {
        if (randomizer == null)
            seed();
        int total = 0;
        if (die > 0)
            for (int i = 0; i < number; i++)
                // nextInt(die) is uniform in 0..die-1. The old
                // Math.abs(nextInt() % die) was modulo-biased and went
                // negative when nextInt() returned Integer.MIN_VALUE.
                total += randomizer.nextInt(die) + 1;
        total += modifier;
        return total;
    }

    /** @return a uniform roll in 1..100. */
    public static int rollPercentage()
    {
        if (randomizer == null)
            seed();
        return randomizer.nextInt(100) + 1;
    }
}
| svnimporter generated comment: Deleted . Changed path: [/Version 4.7/com/planet_ink/coffee_mud/core/Dice.java (deleted)]
git-svn-id: 0cdf8356e41b2d8ccbb41bb76c82068fe80b2514@5294 0d6f1817-ed0e-0410-87c9-987e46238f29
| com/planet_ink/coffee_mud/core/Dice.java | svnimporter generated comment: Deleted . Changed path: [/Version 4.7/com/planet_ink/coffee_mud/core/Dice.java (deleted)] |
||
Java | apache-2.0 | 3331870e1c334bc09352f5e988b1d5f7451dd2c7 | 0 | pwachira/droolsexamples,mrietveld/drools,ChallenHB/drools,Buble1981/MyDroolsFork,reynoldsm88/drools,ThiagoGarciaAlves/drools,Buble1981/MyDroolsFork,ThiagoGarciaAlves/drools,amckee23/drools,ngs-mtech/drools,HHzzhz/drools,Buble1981/MyDroolsFork,reynoldsm88/drools,ThiagoGarciaAlves/drools,vinodkiran/drools,reynoldsm88/drools,manstis/drools,OnePaaS/drools,winklerm/drools,prabasn/drools,OnePaaS/drools,kedzie/drools-android,jiripetrlik/drools,ThomasLau/drools,ngs-mtech/drools,ngs-mtech/drools,mrietveld/drools,292388900/drools,TonnyFeng/drools,winklerm/drools,rajashekharmunthakewill/drools,vinodkiran/drools,jiripetrlik/drools,kevinpeterson/drools,ChallenHB/drools,reynoldsm88/drools,vinodkiran/drools,sotty/drools,lanceleverich/drools,ChallenHB/drools,iambic69/drools,ThomasLau/drools,liupugong/drools,vinodkiran/drools,kevinpeterson/drools,winklerm/drools,prabasn/drools,mrietveld/drools,jomarko/drools,sotty/drools,lanceleverich/drools,ChallenHB/drools,sutaakar/drools,mrrodriguez/drools,ngs-mtech/drools,rajashekharmunthakewill/drools,mrrodriguez/drools,sotty/drools,romartin/drools,ThomasLau/drools,iambic69/drools,TonnyFeng/drools,rajashekharmunthakewill/drools,OnePaaS/drools,amckee23/drools,sotty/drools,kevinpeterson/drools,HHzzhz/drools,kedzie/drools-android,romartin/drools,mrrodriguez/drools,winklerm/drools,ThomasLau/drools,liupugong/drools,kevinpeterson/drools,droolsjbpm/drools,jomarko/drools,HHzzhz/drools,jomarko/drools,sutaakar/drools,mrrodriguez/drools,liupugong/drools,rajashekharmunthakewill/drools,jomarko/drools,sutaakar/drools,iambic69/drools,prabasn/drools,winklerm/drools,lanceleverich/drools,iambic69/drools,droolsjbpm/drools,kedzie/drools-android,ngs-mtech/drools,romartin/drools,sotty/drools,rajashekharmunthakewill/drools,prabasn/drools,jiripetrlik/drools,reynoldsm88/drools,droolsjbpm/drools,droolsjbpm/drools,vinodkiran/drools,kedzie/drools-android,romartin/drools,amckee23/drools,lanceleverich/drools
,292388900/drools,romartin/drools,jiripetrlik/drools,ThiagoGarciaAlves/drools,ChallenHB/drools,HHzzhz/drools,OnePaaS/drools,manstis/drools,sutaakar/drools,liupugong/drools,jiripetrlik/drools,sutaakar/drools,TonnyFeng/drools,mrrodriguez/drools,jomarko/drools,kedzie/drools-android,TonnyFeng/drools,mrietveld/drools,TonnyFeng/drools,amckee23/drools,prabasn/drools,292388900/drools,manstis/drools,mrietveld/drools,droolsjbpm/drools,liupugong/drools,HHzzhz/drools,manstis/drools,ThomasLau/drools,kevinpeterson/drools,manstis/drools,lanceleverich/drools,ThiagoGarciaAlves/drools,amckee23/drools,292388900/drools,Buble1981/MyDroolsFork,iambic69/drools,292388900/drools,OnePaaS/drools | drools-core/src/main/java/org/drools/visualize/package-info.java | /*
* Copyright 2013 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* <body>
* Visual utilities for viewing Rete networks (the Eclipse plug-in can also be used for this).
* </body>
*/
package org.drools.visualize;
| Delete dead package
| drools-core/src/main/java/org/drools/visualize/package-info.java | Delete dead package |
||
Java | bsd-3-clause | b524d5813a1cf57a8cd64b81a354d9df039724c4 | 0 | applidium/Shutterbug | Shutterbug/src/com/applidium/shutterbug/FetchableOverlayItem.java | package com.applidium.shutterbug;
import android.R;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import com.applidium.shutterbug.utils.ShutterbugManager;
import com.applidium.shutterbug.utils.ShutterbugManager.ShutterbugManagerListener;
import com.google.android.maps.GeoPoint;
import com.google.android.maps.OverlayItem;
/**
 * OverlayItem whose marker drawable is fetched asynchronously through
 * ShutterbugManager. A placeholder is displayed until the download finishes,
 * then the downloaded bitmap replaces it.
 */
public class FetchableOverlayItem extends OverlayItem implements ShutterbugManagerListener {

    public interface FetchableOverlayItemListener {
        // You may want to invalidate the MapView instance here
        void onImageFetched(Bitmap bitmap, String url);

        void onImageFailure(String url);
    }

    private FetchableOverlayItemListener mListener;
    private Context                      mContext;

    public FetchableOverlayItem(Context context, GeoPoint point, String title, String snippet) {
        super(point, title, snippet);
        mContext = context;
    }

    /** Fetches the marker image, showing a transparent placeholder meanwhile. */
    public void setImage(String url) {
        ColorDrawable transparent = new ColorDrawable(mContext.getResources().getColor(R.color.transparent));
        setImage(url, transparent);
    }

    /** Fetches the marker image, showing the given drawable resource meanwhile. */
    public void setImage(String url, int placeholderDrawableId) {
        Drawable placeholder = mContext.getResources().getDrawable(placeholderDrawableId);
        setImage(url, placeholder);
    }

    /**
     * Cancels any in-flight download for this item, swaps in the placeholder,
     * and (for a non-null url) starts a new asynchronous download.
     */
    public void setImage(String url, Drawable placeholderDrawable) {
        ShutterbugManager downloader = ShutterbugManager.getSharedImageManager(mContext);
        downloader.cancel(this);
        setDrawable(placeholderDrawable);
        if (url == null) {
            return;
        }
        downloader.download(url, this);
    }

    public FetchableOverlayItemListener getListener() {
        return mListener;
    }

    public void setListener(FetchableOverlayItemListener listener) {
        mListener = listener;
    }

    @Override
    public void onImageSuccess(ShutterbugManager imageManager, Bitmap bitmap, String url) {
        setDrawable(new BitmapDrawable(mContext.getResources(), bitmap));
        if (mListener == null) {
            return;
        }
        mListener.onImageFetched(bitmap, url);
    }

    @Override
    public void onImageFailure(ShutterbugManager imageManager, String url) {
        if (mListener == null) {
            return;
        }
        mListener.onImageFailure(url);
    }

    /** Centers the drawable at half its intrinsic size and installs it as the marker. */
    private void setDrawable(Drawable drawable) {
        int quarterWidth = drawable.getIntrinsicWidth() / 4;
        int quarterHeight = drawable.getIntrinsicHeight() / 4;
        drawable.setBounds(-quarterWidth, -quarterHeight, quarterWidth, quarterHeight);
        setMarker(drawable);
    }
}
| Support for Google Maps v1 is now removed | Shutterbug/src/com/applidium/shutterbug/FetchableOverlayItem.java | Support for Google Maps v1 is now removed |
||
Java | mit | afb8c88c873ee55da9b8601a3d481ede70185b0f | 0 | linuxserver/davos,linuxserver/davos,linuxserver/davos | package io.linuxserver.davos.delegation.services;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import io.linuxserver.davos.converters.HostConverter;
import io.linuxserver.davos.persistence.dao.HostDAO;
import io.linuxserver.davos.persistence.dao.ScheduleDAO;
import io.linuxserver.davos.persistence.model.HostModel;
import io.linuxserver.davos.transfer.ftp.client.Client;
import io.linuxserver.davos.transfer.ftp.client.ClientFactory;
import io.linuxserver.davos.transfer.ftp.client.UserCredentials;
import io.linuxserver.davos.transfer.ftp.client.UserCredentials.Identity;
import io.linuxserver.davos.web.Host;
/**
 * Default HostService implementation: CRUD over stored hosts plus a live
 * connection test against a configured host (password or identity-file auth).
 */
@Component
public class HostServiceImpl implements HostService {

    private static final Logger LOGGER = LoggerFactory.getLogger(HostServiceImpl.class);

    @Resource
    private HostDAO hostDAO;

    @Resource
    private ScheduleDAO scheduleDAO;

    @Resource
    private HostConverter hostConverter;

    /** Fetches a single host by its database id. */
    @Override
    public Host fetchHost(Long id) {
        return toHost(hostDAO.fetchHost(id));
    }

    /** Persists the given host and returns the stored representation. */
    @Override
    public Host saveHost(Host host) {
        HostModel model = hostConverter.convertFrom(host);
        return hostConverter.convertTo(hostDAO.saveHost(model));
    }

    @Override
    public void deleteHost(Long id) {
        hostDAO.deleteHost(id);
    }

    @Override
    public List<Host> fetchAllHosts() {
        return hostDAO.fetchAllHosts().stream().map(this::toHost).collect(Collectors.toList());
    }

    private Host toHost(HostModel model) {
        return hostConverter.convertTo(model);
    }

    /** @return ids of all schedules that reference the given host. */
    @Override
    public List<Long> fetchSchedulesUsingHost(Long id) {
        return scheduleDAO.fetchSchedulesUsingHost(id).stream().map(s -> s.id).collect(Collectors.toList());
    }

    /**
     * Opens (and immediately closes) a connection to the host to verify the
     * stored address, port and credentials. Connection failures propagate to
     * the caller as feedback.
     */
    @Override
    public void testConnection(Host host) {
        HostModel model = hostConverter.convertFrom(host);
        // Fixed: the format string had no {} placeholder, so the address
        // argument was silently dropped from the log message.
        LOGGER.info("Attempting to test connection to host {}", model.address);
        Client client = new ClientFactory().getClient(model.protocol);
        // Security fix: log the username only -- never the plaintext password.
        LOGGER.debug("Connecting as user {}", model.username);
        UserCredentials userCredentials;
        if (model.isIdentityFileEnabled())
            userCredentials = new UserCredentials(model.username, new Identity(model.identityFile));
        else
            userCredentials = new UserCredentials(model.username, model.password);
        client.setCredentials(userCredentials);
        client.setHost(model.address);
        client.setPort(model.port);
        LOGGER.debug("Making connection on port {}", model.port);
        client.connect();
        LOGGER.info("Connection successful.");
        client.disconnect();
        LOGGER.debug("Disconnected");
    }
}
| src/main/java/io/linuxserver/davos/delegation/services/HostServiceImpl.java | package io.linuxserver.davos.delegation.services;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import io.linuxserver.davos.converters.HostConverter;
import io.linuxserver.davos.persistence.dao.HostDAO;
import io.linuxserver.davos.persistence.dao.ScheduleDAO;
import io.linuxserver.davos.persistence.model.HostModel;
import io.linuxserver.davos.transfer.ftp.client.Client;
import io.linuxserver.davos.transfer.ftp.client.ClientFactory;
import io.linuxserver.davos.transfer.ftp.client.UserCredentials;
import io.linuxserver.davos.web.Host;
// NOTE(review): pre-change snapshot of HostServiceImpl (before identity-file
// support). Kept byte-identical; defects are flagged in comments only.
@Component
public class HostServiceImpl implements HostService {
private static final Logger LOGGER = LoggerFactory.getLogger(HostServiceImpl.class);
@Resource
private HostDAO hostDAO;
@Resource
private ScheduleDAO scheduleDAO;
@Resource
private HostConverter hostConverter;
// Fetches a single host by its database id.
@Override
public Host fetchHost(Long id) {
return toHost(hostDAO.fetchHost(id));
}
// Persists the given host and returns the stored representation.
@Override
public Host saveHost(Host host) {
HostModel model = hostConverter.convertFrom(host);
return hostConverter.convertTo(hostDAO.saveHost(model));
}
@Override
public void deleteHost(Long id) {
hostDAO.deleteHost(id);
}
@Override
public List<Host> fetchAllHosts() {
return hostDAO.fetchAllHosts().stream().map(this::toHost).collect(Collectors.toList());
}
private Host toHost(HostModel model) {
return hostConverter.convertTo(model);
}
// Returns ids of all schedules that reference the given host.
@Override
public List<Long> fetchSchedulesUsingHost(Long id) {
return scheduleDAO.fetchSchedulesUsingHost(id).stream().map(s -> s.id).collect(Collectors.toList());
}
// Opens (and immediately closes) a connection to verify host settings.
@Override
public void testConnection(Host host) {
HostModel model = hostConverter.convertFrom(host);
// NOTE(review): missing {} placeholder -- the address is never printed.
LOGGER.info("Attempting to test connection to host", model.address);
Client client = new ClientFactory().getClient(model.protocol);
// NOTE(review): logs the plaintext password at debug level -- security leak.
LOGGER.debug("Credentials: {} : {}", model.username, model.password);
client.setCredentials(new UserCredentials(model.username, model.password));
client.setHost(model.address);
client.setPort(model.port);
LOGGER.debug("Making connection on port {}", model.port);
client.connect();
LOGGER.info("Connection successful.");
client.disconnect();
LOGGER.debug("Disconnected");
}
}
| Updated connection test to be compatible with identity file
| src/main/java/io/linuxserver/davos/delegation/services/HostServiceImpl.java | Updated connection test to be compatible with identity file |
|
Java | mit | 1a78a7c3649e96343343186f045fde3178ab4a05 | 0 | Parnswir/unmp | package com.parnswir.unmp;
import java.io.IOException;
import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.AudioManager.OnAudioFocusChangeListener;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.NotificationCompat.Builder;
// Foreground music-playback service. Commands arrive via startService intents
// carrying one of the EXTRA_ID action codes below and are executed on a
// dedicated audio-priority HandlerThread.
// NOTE(review): the class's closing brace lies outside this span (fused with
// surrounding metadata), so only comments are added here.
public class PlayerService extends Service implements OnAudioFocusChangeListener {
// Intent extra key selecting one of the action codes below.
public static final String EXTRA_ID = "state";
public static final int STOP = 0;
public static final int START = 1;
public static final int PLAY = 2;
public static final int PAUSE = 3;
private Looper mServiceLooper;
private ServiceHandler mServiceHandler;
private MediaPlayer player;
// True while playback is paused (as opposed to stopped).
private boolean playerIsPaused = false;
// Executes incoming action codes on the background HandlerThread.
private final class ServiceHandler extends Handler {
public ServiceHandler(Looper looper) {
super(looper);
}
@Override
public void handleMessage(Message msg) {
// msg.arg1 = startId (for stopSelf), msg.arg2 = action code.
switch (msg.arg2) {
case STOP: stopSelf(msg.arg1); stopForeground(true); stop(); break;
case START: setForeground(); break;
// TODO(review): hard-coded test track; should come from the intent.
case PLAY: play("file:///storage/sdcard0/Music/Awolnation/Megalithic Symphony/10 Sail.mp3"); break;
case PAUSE: pause(); break;
}
}
}
@Override
public void onCreate() {
// Audio-priority worker thread so playback control never runs on the UI thread.
HandlerThread thread = new HandlerThread("ServiceStartArguments", Process.THREAD_PRIORITY_AUDIO);
thread.start();
mServiceLooper = thread.getLooper();
mServiceHandler = new ServiceHandler(mServiceLooper);
getAudioFocus();
initializeMediaPlayer();
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
// Forward the requested action to the worker thread.
int action = intent.getExtras().getInt(EXTRA_ID);
Message msg = mServiceHandler.obtainMessage();
msg.arg1 = startId;
msg.arg2 = action;
mServiceHandler.sendMessage(msg);
return START_STICKY;
}
@Override
public IBinder onBind(Intent intent) {
// Started (not bound) service.
return null;
}
@Override
public void onDestroy() {
// Release the MediaPlayer's native resources.
if (player != null) player.release();
}
// Promotes the service to foreground with a playback notification.
private void setForeground() {
Intent notificationIntent = new Intent(this, MainActivity.class);
PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0);
// TODO(review): title/text are placeholders; actions all reuse the same intent.
Builder builder = new NotificationCompat.Builder(getApplicationContext())
.setSmallIcon(R.drawable.ic_action_play)
.setUsesChronometer(true)
.setContentIntent(pendingIntent)
.setContentTitle("Apotheosis")
.setContentText("Greendjohn")
.addAction(R.drawable.ic_action_play, "Play", pendingIntent)
.addAction(R.drawable.ic_action_next, "Next", pendingIntent);
Notification notification = builder.build();
startForeground(1, notification);
}
// Requests exclusive audio focus for music playback.
private void getAudioFocus() {
AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
int result = audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC,
AudioManager.AUDIOFOCUS_GAIN);
if (result != AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
// could not get audio focus.
}
}
// Creates the MediaPlayer; playback starts automatically once prepared.
private void initializeMediaPlayer() {
player = new MediaPlayer();
player.setAudioStreamType(AudioManager.STREAM_MUSIC);
player.setOnPreparedListener(new OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer arg0) {
player.start();
}
});
player.setOnErrorListener(new OnErrorListener() {
@Override
public boolean onError(MediaPlayer mp, int error, int extra) {
// NOTE(review): errors are silently ignored; returning false lets
// the platform invoke completion handling.
return false;
}
});
}
// Stops any current track and asynchronously starts the given file.
private void play(String filePath) {
stop();
setPlayerDataSource(filePath);
preparePlayer();
}
// Toggles pause/resume.
private void pause() {
if (player.isPlaying()) {
player.pause();
playerIsPaused = true;
} else if (playerIsPaused) {
player.start();
playerIsPaused = false;
}
}
private void stop() {
if (player.isPlaying()) player.stop();
}
// NOTE(review): failures only print a stack trace; the caller is not informed.
private void setPlayerDataSource(String filePath) {
try {
player.setDataSource(filePath);
} catch (IllegalArgumentException e1) {
e1.printStackTrace();
} catch (SecurityException e1) {
e1.printStackTrace();
} catch (IllegalStateException e1) {
e1.printStackTrace();
} catch (IOException e1) {
e1.printStackTrace();
}
}
// Asynchronous prepare; playback starts from the OnPreparedListener.
private void preparePlayer() {
try {
player.prepareAsync();
} catch (IllegalStateException e) {
e.printStackTrace();
}
}
@Override
public void onAudioFocusChange(int focusChange) {
// NOTE(review): all focus transitions are currently no-ops.
switch (focusChange) {
case AudioManager.AUDIOFOCUS_GAIN:
break;
case AudioManager.AUDIOFOCUS_LOSS:
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
// Lost focus for a short time, but we have to stop
// playback. We don't release the media player because playback
// is likely to resume
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
// Lost focus for a short time, but it's ok to keep playing
// at an attenuated level
break;
}
}
} | src/com/parnswir/unmp/PlayerService.java | package com.parnswir.unmp;
import java.io.IOException;
import android.app.Notification;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.support.v4.app.NotificationCompat;
import android.support.v4.app.NotificationCompat.Builder;
/**
 * Background service owning the app's single MediaPlayer instance.
 * Commands (STOP/START/PLAY/PAUSE) arrive via startService intents and are
 * forwarded as messages to a dedicated handler thread, so playback control
 * never runs on the UI thread.
 */
public class PlayerService extends Service {
	/** Intent extra carrying the requested command (one of the constants below). */
	public static final String EXTRA_ID = "state";
	public static final int STOP = 0;
	public static final int START = 1;
	public static final int PLAY = 2;
	public static final int PAUSE = 3;

	private Looper mServiceLooper;
	private ServiceHandler mServiceHandler;
	private MediaPlayer player;
	// True while playback was paused via pause(); used to resume on the next pause() call.
	private boolean playerIsPaused = false;

	/** Dispatches queued commands on the background handler thread. */
	private final class ServiceHandler extends Handler {
		public ServiceHandler(Looper looper) {
			super(looper);
		}

		@Override
		public void handleMessage(Message msg) {
			// arg1 = startId (needed for stopSelf), arg2 = command constant.
			switch (msg.arg2) {
			case STOP: stopSelf(msg.arg1); stopForeground(true); stop(); break;
			case START: setForeground(); break;
			// TODO: hard-coded test track -- should come from the intent / a playlist.
			case PLAY: play("file:///storage/sdcard0/Music/Awolnation/Megalithic Symphony/10 Sail.mp3"); break;
			case PAUSE: pause(); break;
			}
		}
	}

	@Override
	public void onCreate() {
		// Dedicated thread with audio priority so playback control is not
		// blocked by UI work.
		HandlerThread thread = new HandlerThread("ServiceStartArguments", Process.THREAD_PRIORITY_AUDIO);
		thread.start();
		mServiceLooper = thread.getLooper();
		mServiceHandler = new ServiceHandler(mServiceLooper);
		initializeMediaPlayer();
	}

	@Override
	public int onStartCommand(Intent intent, int flags, int startId) {
		// Forward the requested command to the handler thread.
		int action = intent.getExtras().getInt(EXTRA_ID);
		Message msg = mServiceHandler.obtainMessage();
		msg.arg1 = startId;
		msg.arg2 = action;
		mServiceHandler.sendMessage(msg);
		return START_STICKY;
	}

	@Override
	public IBinder onBind(Intent intent) {
		// Started (not bound) service.
		return null;
	}

	@Override
	public void onDestroy() {
		// NOTE(review): the MediaPlayer is never released here -- looks like a
		// resource leak; verify against the service lifecycle.
	}

	/** Promotes the service to foreground with a playback notification. */
	private void setForeground() {
		Intent notificationIntent = new Intent(this, MainActivity.class);
		PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, notificationIntent, 0);
		// TODO: title/text are hard-coded placeholders; both actions reuse the
		// activity intent rather than sending PLAY/NEXT commands.
		Builder builder = new NotificationCompat.Builder(getApplicationContext())
			.setSmallIcon(R.drawable.ic_action_play)
			.setUsesChronometer(true)
			.setContentIntent(pendingIntent)
			.setContentTitle("Apotheosis")
			.setContentText("Greendjohn")
			.addAction(R.drawable.ic_action_play, "Play", pendingIntent)
			.addAction(R.drawable.ic_action_next, "Next", pendingIntent);
		Notification notification = builder.build();
		startForeground(1, notification);
	}

	/**
	 * Creates the player and wires its listeners: playback starts as soon as
	 * asynchronous preparation finishes; errors are currently not handled
	 * (onError returns false).
	 */
	private void initializeMediaPlayer() {
		player = new MediaPlayer();
		player.setAudioStreamType(AudioManager.STREAM_MUSIC);
		player.setOnPreparedListener(new OnPreparedListener() {
			@Override
			public void onPrepared(MediaPlayer arg0) {
				player.start();
			}
		});
		player.setOnErrorListener(new OnErrorListener() {
			@Override
			public boolean onError(MediaPlayer mp, int error, int extra) {
				return false;
			}
		});
	}

	/**
	 * Starts playback of the given file: stop -> set source -> prepare
	 * (playback begins from the OnPreparedListener).
	 */
	private void play(String filePath) {
		stop();
		setPlayerDataSource(filePath);
		preparePlayer();
	}

	/** Toggles playback: pauses when playing, resumes when previously paused. */
	private void pause() {
		if (player.isPlaying()) {
			player.pause();
			playerIsPaused = true;
		} else if (playerIsPaused) {
			player.start();
			playerIsPaused = false;
		}
	}

	/** Stops playback only when actually playing. */
	private void stop() {
		if (player.isPlaying()) player.stop();
	}

	/** Points the player at the given file path; failures are only printed. */
	private void setPlayerDataSource(String filePath) {
		try {
			player.setDataSource(filePath);
		} catch (IllegalArgumentException e1) {
			e1.printStackTrace();
		} catch (SecurityException e1) {
			e1.printStackTrace();
		} catch (IllegalStateException e1) {
			e1.printStackTrace();
		} catch (IOException e1) {
			e1.printStackTrace();
		}
	}

	/** Asynchronously prepares the player; the prepared listener starts playback. */
	private void preparePlayer() {
		try {
			player.prepareAsync();
		} catch (IllegalStateException e) {
			e.printStackTrace();
		}
	}
}
| src/com/parnswir/unmp/PlayerService.java | Add audio focus listener |
|
Java | mit | 367f6c941bc19bc1ecab13c826d83ae668a19618 | 0 | BrassGoggledCoders/Boilerplate | /**
* This class was created by BrassGoggledCoders modding team.
* This class is available as part of the BoilerCraft Mod for Minecraft.
*
* BoilerCraft is open-source and is distributed under the MMPL v1.0 License.
* (http://www.mod-buildcraft.com/MMPL-1.0.txt)
*
*/
package boilerplate.common.baseclasses;
import java.util.List;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.World;
import boilerplate.api.IEnergyItem;
/**
* @author decebaldecebal
*
*/
/**
 * Base class for items that store energy in their NBT tag ("energy" key) and
 * mirror the fill level onto the vanilla 0-20 damage scale for the durability
 * bar (0 = full, 20 = empty).
 *
 * @author decebaldecebal
 */
public abstract class BaseElectricItem extends RootItem implements IEnergyItem
{
	/** Capacity in internal energy units; the constructor argument is given in thousands. */
	protected int maxEnergy;
	/** Maximum energy accepted per receive operation. */
	protected short maxReceive;
	/** Maximum energy emitted per extract operation. */
	protected short maxSend;

	public BaseElectricItem(int maxEnergy, int maxSend, int maxReceive)
	{
		super();
		this.maxEnergy = maxEnergy * 1000;
		this.maxReceive = (short) maxReceive;
		this.maxSend = (short) maxSend;
		this.setMaxStackSize(1);
	}

	/**
	 * Returns the stack's tag compound, attaching a fresh empty one first if
	 * the stack has none yet. Deduplicates the lazy-init pattern previously
	 * repeated in four places.
	 */
	private static NBTTagCompound getOrCreateTag(ItemStack stack)
	{
		if (!stack.hasTagCompound())
		{
			stack.setTagCompound(new NBTTagCompound());
		}
		return stack.getTagCompound();
	}

	@SuppressWarnings("all")
	@Override
	public void getSubItems(Item item, CreativeTabs tab, List list)
	{
		// Creative tab shows both an empty and a fully charged variant.
		list.add(this.getUnchargedItem(item));
		list.add(this.getChargedItem(item));
	}

	/** Creates a fully discharged stack of the given item (damage 20 = empty bar). */
	public ItemStack getUnchargedItem(Item item)
	{
		ItemStack uncharged = new ItemStack(item, 1, 20);
		getOrCreateTag(uncharged).setInteger("energy", 0);
		return uncharged.copy();
	}

	/** Creates a fully charged stack of the given item (damage 0 = full bar). */
	public ItemStack getChargedItem(Item item)
	{
		ItemStack charged = new ItemStack(item, 1, 0);
		getOrCreateTag(charged).setInteger("energy", this.maxEnergy);
		return charged.copy();
	}

	@SuppressWarnings("all")
	@Override
	public void addInformation(ItemStack stack, EntityPlayer entityplayer, List list, boolean flag)
	{
		list.add("Energy: " + (this.getEnergyStored(stack) / 1000) + "k / " + (this.maxEnergy / 1000) + "k");
		list.add("Transfer(in/out): " + this.maxReceive + " / " + this.maxSend);
	}

	@Override
	public void onCreated(ItemStack stack, World par2World, EntityPlayer par3EntityPlayer)
	{
		// Freshly crafted items start empty.
		this.setEnergy(stack, 0);
	}

	/**
	 * Stores the given amount (clamped to [0, maxEnergy]) in the stack's NBT
	 * and updates the damage-based durability bar accordingly.
	 */
	public void setEnergy(ItemStack stack, int energy)
	{
		NBTTagCompound tag = getOrCreateTag(stack);
		// Clamp into the valid range instead of two separate if-branches.
		energy = Math.max(0, Math.min(energy, this.maxEnergy));
		// Long arithmetic: energy * 20 can overflow int for capacities above ~107M units.
		stack.setItemDamage(20 - (int) ((energy * 20L) / this.maxEnergy));
		tag.setInteger("energy", energy);
		stack.setTagCompound(tag);
	}

	/**
	 * Accepts up to {@code min(free capacity, maxReceive)} energy.
	 *
	 * @param simulate when true, only computes the amount without storing it
	 * @return the amount actually (or would-be) received
	 */
	@Override
	public int receiveEnergy(ItemStack itemStack, int maxReceive, boolean simulate)
	{
		int received = Math.min(this.maxEnergy - this.getEnergyStored(itemStack), maxReceive);
		received = Math.min(received, this.maxReceive);
		if (!simulate)
		{
			this.setEnergy(itemStack, this.getEnergyStored(itemStack) + received);
		}
		return received;
	}

	/**
	 * Emits up to {@code min(stored, maxSend)} energy.
	 *
	 * @param simulate when true, only computes the amount without removing it
	 * @return the amount actually (or would-be) extracted
	 */
	@Override
	public int extractEnergy(ItemStack itemStack, int maxExtract, boolean simulate)
	{
		int extracted = Math.min(this.getEnergyStored(itemStack), maxExtract);
		extracted = Math.min(extracted, this.maxSend);
		if (!simulate)
		{
			this.setEnergy(itemStack, this.getEnergyStored(itemStack) - extracted);
		}
		return extracted;
	}

	/** Reads the stored energy, lazily initializing untagged stacks to empty. */
	@Override
	public int getEnergyStored(ItemStack itemStack)
	{
		if (itemStack.hasTagCompound())
		{
			return itemStack.getTagCompound().getInteger("energy");
		}
		this.setEnergy(itemStack, 0);
		return 0;
	}

	@Override
	public int getMaxEnergyStored(ItemStack container)
	{
		return this.maxEnergy;
	}

	@Override
	public short getMaxSend()
	{
		return this.maxSend;
	}

	/** Durability bar fraction: 0.0 when full, 1.0 when empty. */
	@Override
	public double getDurabilityForDisplay(ItemStack stack)
	{
		return 1.0D - ((double) getEnergyStored(stack) / getMaxEnergyStored(stack));
	}

	@Override
	public boolean showDurabilityBar(ItemStack stack)
	{
		return true;
	}
}
| src/main/java/boilerplate/common/baseclasses/BaseElectricItem.java | /**
* This class was created by BrassGoggledCoders modding team.
* This class is available as part of the BoilerCraft Mod for Minecraft.
*
* BoilerCraft is open-source and is distributed under the MMPL v1.0 License.
* (http://www.mod-buildcraft.com/MMPL-1.0.txt)
*
*/
package boilerplate.common.baseclasses;
import java.util.List;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.World;
import boilerplate.api.IEnergyItem;
/**
* @author decebaldecebal
*
*/
/**
 * Base class for items that store energy in their NBT tag ("energy" key).
 * The fill level is mirrored onto item damage (0 = full, 20 = empty) so the
 * vanilla durability bar reflects the charge.
 *
 * @author decebaldecebal
 */
public abstract class BaseElectricItem extends RootItem implements IEnergyItem
{
	// Capacity in internal energy units; the constructor argument is given in thousands.
	protected int maxEnergy;
	// Maximum energy accepted per receive operation.
	protected short maxReceive;
	// Maximum energy emitted per extract operation.
	protected short maxSend;

	public BaseElectricItem(int maxEnergy, int maxSend, int maxReceive)
	{
		super();
		this.maxEnergy = maxEnergy * 1000;
		this.maxReceive = (short) maxReceive;
		this.maxSend = (short) maxSend;
		this.setMaxStackSize(1);
		// Damage 0..20 doubles as the charge indicator; subtypes + no repair
		// keep the damage value meaningful.
		this.setMaxDamage(20);
		this.setHasSubtypes(true);
		this.setNoRepair();
	}

	@SuppressWarnings("all")
	@Override
	public void getSubItems(Item item, CreativeTabs tab, List list)
	{
		// Creative tab shows both an empty and a fully charged variant.
		list.add(this.getUnchargedItem(item));
		list.add(this.getChargedItem(item));
	}

	/** Creates a fully discharged stack of the given item (damage 20 = empty bar). */
	public ItemStack getUnchargedItem(Item item)
	{
		ItemStack uncharged = new ItemStack(item, 1, 20);
		if (!uncharged.hasTagCompound())
		{
			uncharged.setTagCompound(new NBTTagCompound());
		}
		uncharged.getTagCompound().setInteger("energy", 0);
		return uncharged.copy();
	}

	/** Creates a fully charged stack of the given item (damage 0 = full bar). */
	public ItemStack getChargedItem(Item item)
	{
		ItemStack charged = new ItemStack(item, 1, 0);
		if (!charged.hasTagCompound())
		{
			charged.setTagCompound(new NBTTagCompound());
		}
		charged.getTagCompound().setInteger("energy", this.maxEnergy);
		return charged.copy();
	}

	@SuppressWarnings("all")
	@Override
	public void addInformation(ItemStack stack, EntityPlayer entityplayer, List list, boolean flag)
	{
		list.add("Energy: " + (this.getEnergyStored(stack) / 1000) + "k / " + (this.maxEnergy / 1000) + "k");
		list.add("Transfer(in/out): " + this.maxReceive + " / " + this.maxSend);
	}

	@Override
	public void onCreated(ItemStack stack, World par2World, EntityPlayer par3EntityPlayer)
	{
		// Freshly crafted items start empty.
		this.setEnergy(stack, 0);
	}

	/**
	 * Stores the given amount (clamped to [0, maxEnergy]) in the stack's NBT
	 * and maps the fill level onto the 0-20 damage scale.
	 */
	public void setEnergy(ItemStack stack, int energy)
	{
		if (!stack.hasTagCompound())
		{
			stack.setTagCompound(new NBTTagCompound());
		}
		NBTTagCompound tag = stack.getTagCompound();
		if (energy < 0)
		{
			energy = 0;
		}
		if (energy > this.maxEnergy)
		{
			energy = this.maxEnergy;
		}
		// NOTE(review): energy * 20 can overflow int for very large capacities.
		stack.setItemDamage(20 - ((energy * 20) / this.maxEnergy));
		tag.setInteger("energy", energy);
		stack.setTagCompound(tag);
	}

	/** Accepts up to min(free capacity, maxReceive); simulate only computes the amount. */
	@Override
	public int receiveEnergy(ItemStack itemStack, int maxReceive, boolean simulate)
	{
		int received = Math.min(this.maxEnergy - this.getEnergyStored(itemStack), maxReceive);
		received = Math.min(received, this.maxReceive);
		if (!simulate)
		{
			this.setEnergy(itemStack, this.getEnergyStored(itemStack) + received);
		}
		return received;
	}

	/** Emits up to min(stored, maxSend); simulate only computes the amount. */
	@Override
	public int extractEnergy(ItemStack itemStack, int maxExtract, boolean simulate)
	{
		int extracted = Math.min(this.getEnergyStored(itemStack), maxExtract);
		extracted = Math.min(extracted, this.maxSend);
		if (!simulate)
		{
			this.setEnergy(itemStack, this.getEnergyStored(itemStack) - extracted);
		}
		return extracted;
	}

	/** Reads the stored energy, lazily initializing untagged stacks to empty. */
	@Override
	public int getEnergyStored(ItemStack itemStack)
	{
		if (itemStack.hasTagCompound())
		{
			return itemStack.getTagCompound().getInteger("energy");
		}
		else
		{
			this.setEnergy(itemStack, 0);
		}
		return 0;
	}

	@Override
	public int getMaxEnergyStored(ItemStack container)
	{
		return this.maxEnergy;
	}

	@Override
	public short getMaxSend()
	{
		return this.maxSend;
	}
}
| Start switching over electric items to non-meta damagebars
| src/main/java/boilerplate/common/baseclasses/BaseElectricItem.java | Start switching over electric items to non-meta damagebars |
|
Java | mit | 1fc9423fa234b4472aaae505939d1979373b3497 | 0 | jakobehmsen/dynamake | package dynamake.models;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JLayeredPane;
import javax.swing.JPanel;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.border.Border;
import dynamake.commands.Command;
import dynamake.commands.DualCommand;
import dynamake.commands.DualCommandPair;
import dynamake.delegates.Action1;
import dynamake.menubuilders.CompositeMenuBuilder;
import dynamake.models.factories.Factory;
import dynamake.tools.Tool;
import dynamake.transcription.DualCommandFactory;
import dynamake.transcription.RepaintRunBuilder;
import dynamake.transcription.TranscriberBranch;
public class LiveModel extends Model {
/**
*
*/
private static final long serialVersionUID = 1L;
	/**
	 * Change event broadcast whenever the set of mouse buttons bound to a tool
	 * changes. An empty button list signals that the tool was unbound.
	 */
	public static class ButtonsToolBindingChanged {
		/** The (sorted) button combination now bound to the tool; empty when unbound. */
		public final List<Integer> buttons;
		/** Index of the affected tool. */
		public final int tool;
		public ButtonsToolBindingChanged(List<Integer> buttons, int tool) {
			this.buttons = buttons;
			this.tool = tool;
		}
	}
	// The single child model whose view fills the live panel's content area.
	private Model content;
	// Maps a sorted combination of mouse-button numbers to the index of the bound tool.
	private Hashtable<List<Integer>, Integer> buttonsToToolMap = new Hashtable<List<Integer>, Integer>();

	public LiveModel(Model content) {
		this.content = content;
	}
@Override
public Model modelCloneIsolated() {
LiveModel clone = new LiveModel(content.cloneIsolated());
clone.buttonsToToolMap.putAll(clone.buttonsToToolMap);
return clone;
}
public int getToolForButtons(List<Integer> buttons) {
Integer tool = buttonsToToolMap.get(buttons);
return tool != null ? tool : -1;
}
public List<Integer> getButtonsForTool(int tool) {
for(Map.Entry<List<Integer>, Integer> entry: buttonsToToolMap.entrySet()) {
if(entry.getValue() == tool)
return entry.getKey();
}
return Collections.emptyList();
}
public void removeButtonsToToolBinding(List<Integer> buttons, int tool, PropogationContext propCtx, int propDistance, TranscriberBranch<Model> branch) {
buttonsToToolMap.remove(buttons);
sendChanged(new ButtonsToolBindingChanged(Collections.<Integer>emptyList(), tool), propCtx, propDistance, 0, branch);
}
public void bindButtonsToTool(List<Integer> buttons, int tool, PropogationContext propCtx, int propDistance, TranscriberBranch<Model> branch) {
buttonsToToolMap.put(buttons, tool);
sendChanged(new ButtonsToolBindingChanged(buttons, tool), propCtx, propDistance, 0, branch);
}
	/**
	 * Undoable command: binds a button combination to a tool on the LiveModel
	 * found at {@code modelLocation}. Its inverse is
	 * {@link RemoveButtonsToToolBindingCommand}.
	 */
	public static class BindButtonsToToolCommand implements Command<Model> {
		/**
		 * 
		 */
		private static final long serialVersionUID = 1L;
		private Location modelLocation;
		private List<Integer> buttons;
		private int tool;

		public BindButtonsToToolCommand(Location modelLocation, List<Integer> buttons, int tool) {
			this.modelLocation = modelLocation;
			this.buttons = buttons;
			this.tool = tool;
		}

		@Override
		public void executeOn(PropogationContext propCtx, Model prevalentSystem, Date executionTime, TranscriberBranch<Model> branch) {
			// Resolve the target LiveModel relative to the prevalent system root.
			LiveModel liveModel = (LiveModel)modelLocation.getChild(prevalentSystem);
			liveModel.bindButtonsToTool(buttons, tool, propCtx, 0, branch);
		}
	}
	/**
	 * Undoable command: removes the binding between a button combination and a
	 * tool on the LiveModel found at {@code modelLocation}. Its inverse is
	 * {@link BindButtonsToToolCommand}.
	 */
	public static class RemoveButtonsToToolBindingCommand implements Command<Model> {
		/**
		 * 
		 */
		private static final long serialVersionUID = 1L;
		private Location modelLocation;
		private List<Integer> buttons;
		private int tool;

		public RemoveButtonsToToolBindingCommand(Location modelLocation, List<Integer> buttons, int tool) {
			this.modelLocation = modelLocation;
			this.buttons = buttons;
			this.tool = tool;
		}

		@Override
		public void executeOn(PropogationContext propCtx, Model prevalentSystem, Date executionTime, TranscriberBranch<Model> branch) {
			// Resolve the target LiveModel relative to the prevalent system root.
			LiveModel liveModel = (LiveModel)modelLocation.getChild(prevalentSystem);
			liveModel.removeButtonsToToolBinding(buttons, tool, propCtx, 0, branch);
		}
	}
	/** Locator for the {@code content} field of a LiveModel. */
	public static class ContentLocator implements dynamake.models.ModelLocator {
		@Override
		public ModelLocation locate() {
			return new FieldContentLocation();
		}
	}
	/**
	 * Model-side location resolving to a LiveModel's {@code content} field; its
	 * view-side counterpart is {@link ViewFieldContentLocation}.
	 */
	private static class FieldContentLocation implements ModelLocation {
		/**
		 * 
		 */
		private static final long serialVersionUID = 1L;

		@Override
		public Object getChild(Object holder) {
			return ((LiveModel)holder).content;
		}

		@Override
		public Location getModelComponentLocation() {
			return new ViewFieldContentLocation();
		}
	}
	/** View-side location: resolves to the component bound to a LivePanel's content view. */
	private static class ViewFieldContentLocation implements Location {
		@Override
		public Object getChild(Object holder) {
			return ((LivePanel)holder).contentView.getBindingTarget();
		}
	}
private static final int BUTTON_FONT_SIZE = 13;
private static final Color TOP_BACKGROUND_COLOR = new Color(90, 90, 90);
private static final Color TOP_BUTTON_BACKGROUND_COLOR = TOP_BACKGROUND_COLOR;
private static final Color TOP_FOREGROUND_COLOR = Color.WHITE;
	/**
	 * Toolbar widget representing one tool. Shows the tool name plus a badge
	 * with the mouse-button combination bound to it. Pressing a combination of
	 * mouse buttons on the widget rebinds that combination to this tool via
	 * undoable commands executed on a transcriber branch.
	 */
	public static class ToolButton extends JPanel {
		/**
		 * 
		 */
		private static final long serialVersionUID = 1L;
		private int tool;
		private List<Integer> buttons;
		private String text;
		private LiveModel liveModel;
		private ModelTranscriber modelTranscriber;
		private JLabel labelToolName;
		private JPanel panelButtons;

		public ToolButton(int tool, List<Integer> buttons, String text, LiveModel liveModel, ModelTranscriber modelTranscriber) {
			this.tool = tool;
			this.buttons = buttons;
			this.text = text;
			this.liveModel = liveModel;
			this.modelTranscriber = modelTranscriber;
			setLayout(new BorderLayout(0, 0));
			setBackground(TOP_BUTTON_BACKGROUND_COLOR);
			labelToolName = new JLabel();
			labelToolName.setHorizontalAlignment(SwingConstants.CENTER);
			labelToolName.setForeground(TOP_FOREGROUND_COLOR);
			labelToolName.setFont(new Font(labelToolName.getFont().getFontName(), Font.BOLD, BUTTON_FONT_SIZE));
			add(labelToolName, BorderLayout.CENTER);
			panelButtons = new JPanel();
			panelButtons.setLayout(new FlowLayout(FlowLayout.CENTER, 2, 0));
			panelButtons.setOpaque(false);
			add(panelButtons, BorderLayout.NORTH);
			this.setPreferredSize(new Dimension(72, 45));
			update();
			this.addMouseListener(new MouseAdapter() {
				// Tracks a chord: all buttons held down simultaneously form the binding.
				int buttonsDown = 0;
				ArrayList<Integer> buttonsPressed = new ArrayList<Integer>();

				@Override
				public void mousePressed(MouseEvent e) {
					int newButton = e.getButton();
					buttonsDown++;
					buttonsPressed.add(newButton);
					// Keep the combination sorted so it compares equal regardless of press order.
					Collections.sort(buttonsPressed);
					if(buttonsDown == 1) {
						setBackground(TOP_BUTTON_BACKGROUND_COLOR.brighter());
					}
					SwingUtilities.invokeLater(new Runnable() {
						@Override
						public void run() {
							// Preview the in-progress combination on the badge.
							update(buttonsPressed);
							ToolButton.this.repaint();
						}
					});
				}

				@Override
				public void mouseReleased(MouseEvent e) {
					buttonsDown--;
					if(buttonsDown == 0) {
						setBackground(TOP_BUTTON_BACKGROUND_COLOR);
						PropogationContext propCtx = new PropogationContext();
						TranscriberBranch<Model> branch = ToolButton.this.modelTranscriber.createBranch();
						@SuppressWarnings("unchecked")
						final ArrayList<Integer> localButtonsPressed = (ArrayList<Integer>)buttonsPressed.clone();
						branch.execute(propCtx, new DualCommandFactory<Model>() {
							@Override
							public void createDualCommands(List<DualCommand<Model>> dualCommands) {
								List<Integer> currentButtons = ToolButton.this.buttons;
								Location modelLocation = ToolButton.this.modelTranscriber.getModelLocation();
								int previousToolForNewButton = ToolButton.this.liveModel.getToolForButtons(localButtonsPressed);
								if(previousToolForNewButton != -1) {
									// If the new buttons are associated to another tool, then remove that binding
									dualCommands.add(new DualCommandPair<Model>(
										new RemoveButtonsToToolBindingCommand(modelLocation, localButtonsPressed, previousToolForNewButton),
										new BindButtonsToToolCommand(modelLocation, localButtonsPressed, previousToolForNewButton))
									);
								}
								if(currentButtons.size() > 0) {
									// If this tool is associated to buttons, then remove that binding before
									dualCommands.add(new DualCommandPair<Model>(
										new RemoveButtonsToToolBindingCommand(modelLocation, currentButtons, ToolButton.this.tool),
										new BindButtonsToToolCommand(modelLocation, currentButtons, ToolButton.this.tool))
									);
									// adding the replacement binding
									dualCommands.add(new DualCommandPair<Model>(
										new BindButtonsToToolCommand(modelLocation, localButtonsPressed, ToolButton.this.tool),
										new RemoveButtonsToToolBindingCommand(modelLocation, localButtonsPressed, ToolButton.this.tool))
									);
								} else {
									dualCommands.add(new DualCommandPair<Model>(
										new BindButtonsToToolCommand(modelLocation, localButtonsPressed, ToolButton.this.tool),
										new RemoveButtonsToToolBindingCommand(modelLocation, localButtonsPressed, ToolButton.this.tool)
									));
								}
							}
						});
						branch.close();
						buttonsPressed.clear();
					}
				}
			});
//			// Support for binding a key combination with a tool
//			// It should be possible to both bind a key combination AND a mouse button to the same tool at the same time
//			KeyListener keyListener = new KeyAdapter() {
//				@Override
//				public void keyPressed(KeyEvent e) {
////					System.out.println(e.isControlDown() + ":" + e.getKeyCode());
//				}
//
//				@Override
//				public void keyTyped(KeyEvent e) {
////					System.out.println(e.isControlDown() + ":" + e.getKeyCode());
//				}
//			};
			setFocusable(true);
//			this.addKeyListener(keyListener);
//			labelToolName.addKeyListener(keyListener);
//			labelButton.addKeyListener(keyListener);
		}

		// One display color per mouse button (index = button number - 1).
		private static final Color[] BUTTON_COLORS = new Color[] {
			new Color(255, 120, 10),//Color.RED,
			new Color(10, 220, 10), //Color.GREEN,
			new Color(10, 10, 220), //Color.BLUE,
			new Color(10, 220, 220), //Color.CYAN,
			new Color(220, 220, 10), //Color.ORANGE
			new Color(220, 10, 220),
		};

		public static Color getColorForButton(int button) {
			return BUTTON_COLORS[button - 1];
		}

		/**
		 * Rebuilds the badge area for the given button combination; parameter
		 * intentionally shadows the field so the same code serves both the
		 * committed binding and the in-progress press preview.
		 */
		private void update(List<Integer> buttons) {
			labelToolName.setText(text);
			Border innerBorder = BorderFactory.createEmptyBorder(0, 5, 0, 5);
			Border outerBorder;
			panelButtons.removeAll();
			if(buttons.size() > 0) {
				for(int button: buttons) {
					JLabel buttonLabel = new JLabel();
					buttonLabel.setHorizontalAlignment(SwingConstants.CENTER);
					buttonLabel.setForeground(getColorForButton(button));
					buttonLabel.setFont(new Font(buttonLabel.getFont().getFontName(), Font.ITALIC | Font.BOLD, 16));
					buttonLabel.setText("" + button);
					panelButtons.add(buttonLabel);
				}
				panelButtons.revalidate();
				outerBorder = BorderFactory.createLoweredSoftBevelBorder();
			} else {
				// Placeholder keeps the layout height stable when no buttons are bound.
				JLabel buttonLabel = new JLabel();
				buttonLabel.setText(" ");
				buttonLabel.setForeground(null);
				panelButtons.add(buttonLabel);
				outerBorder = BorderFactory.createRaisedSoftBevelBorder();
			}
			setBorder(BorderFactory.createCompoundBorder(outerBorder, innerBorder));
		}

		private void update() {
			update(buttons);
		}

		public void setButtons(List<Integer> buttons) {
			this.buttons = buttons;
			update();
		}
	}
	/**
	 * Wraps ToolButton construction behind a JComponent-returning factory.
	 * NOTE(review): the ButtonGroup parameter is unused -- likely left over
	 * from an earlier toggle-button implementation; confirm before removing.
	 */
	private static JComponent createToolButton(final LiveModel model, final ModelTranscriber modelTranscriber, ButtonGroup group, List<Integer> buttons, final int tool, final String text) {
		return new ToolButton(tool, buttons, text, model, modelTranscriber);
	}
	/** Refreshes the button-combination badge shown on the given tool button. */
	private static void updateToolButton(JComponent toolButton, List<Integer> buttons) {
		((ToolButton)toolButton).setButtons(buttons);
	}
	/**
	 * Transparent overlay stacked above the content view. It intercepts all
	 * mouse input and dispatches it to the Tool bound to the pressed mouse
	 * button, letting tools draw feedback on top of the content.
	 */
	public static class ProductionPanel extends JPanel {
		/**
		 * 
		 */
		private static final long serialVersionUID = 1L;
		public static final Color TARGET_OVER_COLOR = new Color(35, 89, 184);
		public static final Color BIND_COLOR = new Color(25, 209, 89);
		public static final Color UNBIND_COLOR = new Color(240, 34, 54);
		public static final Color SELECTION_COLOR = Color.GRAY;

		/** Routes mouse events to the tool bound to the active mouse button. */
		public static class EditPanelMouseAdapter extends MouseAdapter {
			public ProductionPanel productionPanel;
			// Button held down during the current press/drag/release cycle (0 = none).
			public int buttonPressed;
			public static final int HORIZONTAL_REGION_WEST = 0;
			public static final int HORIZONTAL_REGION_CENTER = 1;
			public static final int HORIZONTAL_REGION_EAST = 2;
			public static final int VERTICAL_REGION_NORTH = 0;
			public static final int VERTICAL_REGION_CENTER = 1;
			public static final int VERTICAL_REGION_SOUTH = 2;

			public EditPanelMouseAdapter(ProductionPanel productionPanel) {
				this.productionPanel = productionPanel;
			}

			/**
			 * Resolves the tool bound to the given button; unbound buttons get
			 * a no-op tool so callers never have to null-check.
			 */
			private Tool getTool(int button) {
//				int toolForButton = productionPanel.livePanel.model.getToolForButton(button);
				int toolForButton = productionPanel.livePanel.model.getToolForButtons(Arrays.asList(button));
				if(toolForButton != -1) {
					return productionPanel.livePanel.viewManager.getTools()[toolForButton];
				} else {
					return new Tool() {
						@Override
						public void mouseReleased(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }
						@Override
						public void mousePressed(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }
						@Override
						public void mouseMoved(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }
						@Override
						public void mouseExited(ProductionPanel productionPanel, MouseEvent e) { }
						@Override
						public void mouseDragged(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }
						@Override
						public String getName() { return null; }
						@Override
						public void paint(Graphics g) { }
					};
				}
			}

			/**
			 * Finds the deepest model component under the cursor by converting
			 * the event point into the content view's coordinate space.
			 */
			public ModelComponent getModelOver(MouseEvent e) {
				Point pointInContentView = SwingUtilities.convertPoint((JComponent) e.getSource(), e.getPoint(), (JComponent)productionPanel.contentView.getBindingTarget());
				JComponent componentOver = (JComponent)((JComponent)productionPanel.contentView.getBindingTarget()).findComponentAt(pointInContentView);
				return ModelComponent.Util.closestModelComponent(componentOver);
			}

			// NOTE(review): unlike the drag/release/move handlers below, this one
			// does not guard against a null modelOver -- confirm tools tolerate null.
			public void mousePressed(final MouseEvent e) {
				final ModelComponent modelOver = getModelOver(e);
				productionPanel.editPanelMouseAdapter.buttonPressed = e.getButton();
				final int localButtonPressed = productionPanel.editPanelMouseAdapter.buttonPressed;
//				System.out.println("Pressed button " + localButtonPressed);
				productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
					@Override
					public void run() {
						getTool(localButtonPressed).mousePressed(productionPanel, e, modelOver);
					}
				});
			}

			public void mouseDragged(final MouseEvent e) {
				final ModelComponent modelOver = getModelOver(e);
				if(modelOver != null) {
					final int localButtonPressed = productionPanel.editPanelMouseAdapter.buttonPressed;
//					System.out.println("Dragged button " + localButtonPressed);
					productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
						@Override
						public void run() {
							getTool(localButtonPressed).mouseDragged(productionPanel, e, modelOver);
						}
					});
				}
			}

			public void mouseReleased(final MouseEvent e) {
				final ModelComponent modelOver = getModelOver(e);
				if(modelOver != null) {
					final int localButtonPressed = productionPanel.editPanelMouseAdapter.buttonPressed;
					productionPanel.editPanelMouseAdapter.buttonPressed = 0;
//					System.out.println("Released button " + localButtonPressed);
					productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
						@Override
						public void run() {
							getTool(localButtonPressed).mouseReleased(productionPanel, e, modelOver);
						}
					});
				}
			}

			@Override
			public void mouseMoved(final MouseEvent e) {
				final ModelComponent modelOver = getModelOver(e);
				if(modelOver != null) {
					productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
						@Override
						public void run() {
							// The tool associated to button 1 is used as a "master" tool
							int button = 1;
							getTool(button).mouseMoved(productionPanel, e, modelOver);
						}
					});
				}
			}
		}

		public LivePanel livePanel;
		public Binding<ModelComponent> contentView;
		public EditPanelMouseAdapter editPanelMouseAdapter;

		public ProductionPanel(final LivePanel livePanel, final Binding<ModelComponent> contentView) {
			this.setLayout(null);
			this.livePanel = livePanel;
			this.contentView = contentView;
			// TODO: Consider the following:
			// For a selected frame, it should be possible to scroll upwards to select its immediate parent
			// - and scroll downwards to select its root parents
			editPanelMouseAdapter = new EditPanelMouseAdapter(this);
			this.addMouseListener(editPanelMouseAdapter);
			this.addMouseMotionListener(editPanelMouseAdapter);
			// Fully transparent but opaque, so the overlay receives mouse events
			// without hiding the content underneath.
			this.setOpaque(true);
			this.setBackground(new Color(0, 0, 0, 0));
		}

		@Override
		public void paint(Graphics g) {
			super.paint(g);
			// Let the active tool draw its visual feedback on top of everything.
			editPanelMouseAdapter.getTool(editPanelMouseAdapter.buttonPressed).paint(g);
		}
	}
public static class LivePanel extends JPanel implements ModelComponent {
/**
*
*/
private static final long serialVersionUID = 1L;
public LiveModel model;
private JPanel topPanel;
private JLayeredPane contentPane;
private RemovableListener removableListener;
public ProductionPanel productionPanel;
public ViewManager viewManager;
private ModelTranscriber modelTranscriber;
private JComponent[] buttonTools;
private final Binding<ModelComponent> contentView;
public LivePanel(final ModelComponent rootView, LiveModel model, ModelTranscriber modelTranscriber, final ViewManager viewManager) {
this.setLayout(new BorderLayout());
this.model = model;
this.viewManager = viewManager;
this.modelTranscriber = modelTranscriber;
ViewManager newViewManager = new ViewManager() {
@Override
public Factory[] getFactories() {
return viewManager.getFactories();
}
@Override
public Tool[] getTools() {
return null;
}
};
contentView = model.getContent().createView(rootView, newViewManager, modelTranscriber.extend(new ContentLocator()));
productionPanel = new ProductionPanel(this, contentView);
topPanel = new JPanel();
topPanel.setBackground(TOP_BACKGROUND_COLOR);
topPanel.setBorder(BorderFactory.createLineBorder(Color.BLACK));
contentPane = new JLayeredPane();
productionPanel.setSize(contentPane.getSize().width, contentPane.getSize().height - 1);
contentPane.addComponentListener(new ComponentListener() {
@Override
public void componentShown(ComponentEvent e) { }
@Override
public void componentResized(ComponentEvent e) {
((JComponent)contentView.getBindingTarget()).setSize(((JComponent)e.getSource()).getSize());
if(productionPanel != null) {
productionPanel.setSize(((JComponent)e.getSource()).getSize().width, ((JComponent)e.getSource()).getSize().height - 1);
}
}
@Override
public void componentMoved(ComponentEvent e) { }
@Override
public void componentHidden(ComponentEvent e) { }
});
contentPane.add((JComponent)contentView.getBindingTarget(), JLayeredPane.DEFAULT_LAYER);
this.add(topPanel, BorderLayout.NORTH);
this.add(contentPane, BorderLayout.CENTER);
removableListener = Model.RemovableListener.addObserver(model, new ObserverAdapter() {
{
initializeObserverAdapter();
}
private void initializeObserverAdapter() {
}
@Override
public void changed(Model sender, Object change, final PropogationContext propCtx, int propDistance, int changeDistance, TranscriberBranch<Model> branch) {
// if(change instanceof LiveModel.ButtonToolBindingChanged) {
// LiveModel.ButtonToolBindingChanged bindButtonChanged = (LiveModel.ButtonToolBindingChanged)change;
//
// if(bindButtonChanged.tool != -1) {
// JComponent buttonNewTool = buttonTools[bindButtonChanged.tool];
// updateToolButton(buttonNewTool, bindButtonChanged.button);
// }
// }
if(change instanceof LiveModel.ButtonsToolBindingChanged) {
LiveModel.ButtonsToolBindingChanged bindButtonChanged = (LiveModel.ButtonsToolBindingChanged)change;
if(bindButtonChanged.tool != -1) {
JComponent buttonNewTool = buttonTools[bindButtonChanged.tool];
updateToolButton(buttonNewTool, bindButtonChanged.buttons);
}
}
}
});
contentPane.add(productionPanel, JLayeredPane.MODAL_LAYER);
}
@Override
public void initialize() {
Tool[] tools = viewManager.getTools();
buttonTools = new JComponent[tools.length];
ButtonGroup group = new ButtonGroup();
for(int i = 0; i < tools.length; i++) {
Tool tool = tools[i];
// int button = model.getButtonForTool(i);
// buttonTools[i] = createToolButton(model, modelTranscriber, group, button, i, tool.getName());
List<Integer> buttons = model.getButtonsForTool(i);
buttonTools[i] = createToolButton(model, modelTranscriber, group, buttons, i, tool.getName());
}
for(JComponent buttonTool: buttonTools) {
topPanel.add(buttonTool);
}
TranscriberBranch<Model> initializationBranch = getModelTranscriber().createBranch();
initializationBranch.setOnFinishedBuilder(new RepaintRunBuilder(productionPanel.livePanel));
initializationBranch.close();
}
public Factory[] getFactories() {
return viewManager.getFactories();
}
@Override
public Model getModelBehind() {
return model;
}
@Override
public void appendContainerTransactions(LivePanel livePanel, CompositeMenuBuilder menuBuilder, ModelComponent child, TranscriberBranch<Model> branch) {
}
@Override
public void appendTransactions(ModelComponent livePanel, CompositeMenuBuilder menuBuilder, TranscriberBranch<Model> branch) {
}
/** Delegates to the shared drop-transaction handling defined on {@link Model}. */
@Override
public void appendDroppedTransactions(ModelComponent livePanel, ModelComponent target, Rectangle droppedBounds, CompositeMenuBuilder menuBuilder, TranscriberBranch<Model> branch) {
    Model.appendGeneralDroppedTransactions(livePanel, this, target, droppedBounds, menuBuilder, branch);
}
/** Intentional no-op: the live panel does not act as a drop target with its own transactions. */
@Override
public void appendDropTargetTransactions(
    ModelComponent livePanel, ModelComponent dropped, Rectangle droppedBounds, Point dropPoint, CompositeMenuBuilder menuBuilder, TranscriberBranch<Model> branch) {
}
/** @return the transcriber used to record and branch changes to the underlying model */
@Override
public ModelTranscriber getModelTranscriber() {
    return modelTranscriber;
}
/** Detaches this panel's observer from the model so it can be garbage collected. */
public void releaseBinding() {
    removableListener.releaseBinding();
}
/** @return always {@code null}: dropping onto the live panel triggers no implicit action */
@Override
public DualCommandFactory<Model> getImplicitDropAction(ModelComponent target) {
    return null;
}
/** Visits this component only; child views are not traversed here. */
@Override
public void visitTree(Action1<ModelComponent> visitAction) {
    visitAction.run(this);
}
}
/**
 * Creates the Swing view for this live model and returns a binding that can
 * later release the view's observer registration.
 */
@Override
public Binding<ModelComponent> createView(ModelComponent rootView, ViewManager viewManager, ModelTranscriber modelTranscriber) {
    // Record where this model lives before constructing the panel.
    this.setLocation(modelTranscriber.getModelLocator());

    final LivePanel livePanel = new LivePanel(rootView, this, modelTranscriber, viewManager);

    // The binding simply wraps the panel: releasing it forwards to the panel,
    // and the panel itself is the binding target.
    return new Binding<ModelComponent>() {
        @Override
        public void releaseBinding() {
            livePanel.releaseBinding();
        }

        @Override
        public ModelComponent getBindingTarget() {
            return livePanel;
        }
    };
}
/** @return the wrapped content model that this live model edits */
public Model getContent() {
    return content;
}
}
| eclipse/src/dynamake/models/LiveModel.java | package dynamake.models;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import javax.swing.BorderFactory;
import javax.swing.ButtonGroup;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JLayeredPane;
import javax.swing.JPanel;
import javax.swing.SwingConstants;
import javax.swing.SwingUtilities;
import javax.swing.border.Border;
import dynamake.commands.Command;
import dynamake.commands.DualCommand;
import dynamake.commands.DualCommandPair;
import dynamake.delegates.Action1;
import dynamake.menubuilders.CompositeMenuBuilder;
import dynamake.models.factories.Factory;
import dynamake.tools.Tool;
import dynamake.transcription.DualCommandFactory;
import dynamake.transcription.RepaintRunBuilder;
import dynamake.transcription.TranscriberBranch;
public class LiveModel extends Model {
/**
*
*/
private static final long serialVersionUID = 1L;
// public static class ButtonToolBindingChanged {
// public final int button;
// public final int tool;
//
// public ButtonToolBindingChanged(int button, int tool) {
// this.button = button;
// this.tool = tool;
// }
// }
/**
 * Change event sent when a button combination is bound to, or unbound from, a tool.
 * An empty {@code buttons} list means the tool has been unbound.
 */
public static class ButtonsToolBindingChanged {
    // The button combination now bound to the tool (empty when unbound).
    public final List<Integer> buttons;
    // Index of the affected tool; -1 means no tool is affected.
    public final int tool;

    public ButtonsToolBindingChanged(List<Integer> buttons, int tool) {
        this.buttons = buttons;
        this.tool = tool;
    }
}
private Model content;
// private Hashtable<Integer, Integer> buttonToToolMap = new Hashtable<Integer, Integer>();
private Hashtable<List<Integer>, Integer> buttonsToToolMap = new Hashtable<List<Integer>, Integer>();
public LiveModel(Model content) {
this.content = content;
}
/**
 * Creates an isolated copy of this model: the content is deep-cloned and the
 * current button-combination-to-tool bindings are carried over.
 *
 * @return a detached copy of this live model
 */
@Override
public Model modelCloneIsolated() {
    LiveModel clone = new LiveModel(content.cloneIsolated());
    // Bug fix: the original called clone.buttonsToToolMap.putAll(clone.buttonsToToolMap),
    // copying the clone's own (empty) map into itself, so all tool bindings were
    // silently lost on clone. Copy from this instance instead.
    clone.buttonsToToolMap.putAll(this.buttonsToToolMap);
    return clone;
}
// public int getToolForButton(int button) {
// Integer tool = buttonToToolMap.get(button);
// return tool != null ? tool : -1;
// }
//
// public int getButtonForTool(int tool) {
// for(Map.Entry<Integer, Integer> entry: buttonToToolMap.entrySet()) {
// if(entry.getValue() == tool)
// return entry.getKey();
// }
//
// return -1;
// }
//
// public void removeButtonToToolBinding(int button, int tool, PropogationContext propCtx, int propDistance, TranscriberBranch<Model> branch) {
// buttonToToolMap.remove(button);
// sendChanged(new ButtonToolBindingChanged(-1, tool), propCtx, propDistance, 0, branch);
// }
//
// public void bindButtonToTool(int button, int tool, PropogationContext propCtx, int propDistance, TranscriberBranch<Model> branch) {
// buttonToToolMap.put(button, tool);
// sendChanged(new ButtonToolBindingChanged(button, tool), propCtx, propDistance, 0, branch);
// }
/**
 * Looks up the tool bound to the given exact button combination.
 *
 * @param buttons the (sorted) button combination to look up
 * @return the bound tool index, or -1 when the combination is unbound
 */
public int getToolForButtons(List<Integer> buttons) {
    final Integer boundTool = buttonsToToolMap.get(buttons);
    if (boundTool == null) {
        return -1;
    }
    return boundTool;
}
/**
 * Reverse lookup: finds the button combination currently bound to a tool.
 *
 * @param tool the tool index to search for
 * @return the bound button combination, or an empty list when unbound
 */
public List<Integer> getButtonsForTool(int tool) {
    for (Map.Entry<List<Integer>, Integer> binding : buttonsToToolMap.entrySet()) {
        if (binding.getValue() == tool) {
            return binding.getKey();
        }
    }
    // No combination is bound to this tool.
    return Collections.emptyList();
}
// public static class BindButtonToToolCommand implements Command<Model> {
// /**
// *
// */
// private static final long serialVersionUID = 1L;
// private Location modelLocation;
// private int button;
// private int tool;
//
// public BindButtonToToolCommand(Location modelLocation, int button, int tool) {
// this.modelLocation = modelLocation;
// this.button = button;
// this.tool = tool;
// }
//
// @Override
// public void executeOn(PropogationContext propCtx, Model prevalentSystem, Date executionTime, TranscriberBranch<Model> branch) {
// LiveModel liveModel = (LiveModel)modelLocation.getChild(prevalentSystem);
// liveModel.bindButtonToTool(button, tool, propCtx, 0, branch);
// }
// }
//
// public static class RemoveButtonToToolBindingCommand implements Command<Model> {
// /**
// *
// */
// private static final long serialVersionUID = 1L;
// private Location modelLocation;
// private int button;
// private int tool;
//
// public RemoveButtonToToolBindingCommand(Location modelLocation, int button, int tool) {
// this.modelLocation = modelLocation;
// this.button = button;
// this.tool = tool;
// }
//
// @Override
// public void executeOn(PropogationContext propCtx, Model prevalentSystem, Date executionTime, TranscriberBranch<Model> branch) {
// LiveModel liveModel = (LiveModel)modelLocation.getChild(prevalentSystem);
// liveModel.removeButtonToToolBinding(button, tool, propCtx, 0, branch);
// }
// }
/**
 * Unbinds a button combination from a tool and notifies observers with an
 * empty-buttons change event so toolbar buttons can clear their labels.
 */
public void removeButtonsToToolBinding(List<Integer> buttons, int tool, PropogationContext propCtx, int propDistance, TranscriberBranch<Model> branch) {
    buttonsToToolMap.remove(buttons);
    sendChanged(new ButtonsToolBindingChanged(Collections.<Integer>emptyList(), tool), propCtx, propDistance, 0, branch);
}
/**
 * Binds a button combination to a tool (replacing any previous binding for
 * that combination) and notifies observers of the new binding.
 */
public void bindButtonsToTool(List<Integer> buttons, int tool, PropogationContext propCtx, int propDistance, TranscriberBranch<Model> branch) {
    buttonsToToolMap.put(buttons, tool);
    sendChanged(new ButtonsToolBindingChanged(buttons, tool), propCtx, propDistance, 0, branch);
}
/**
 * Replayable command that binds a button combination to a tool on the
 * {@link LiveModel} found at {@code modelLocation}.
 */
public static class BindButtonsToToolCommand implements Command<Model> {
    /** Serialization version for the command's persisted form. */
    private static final long serialVersionUID = 1L;
    // Location of the target LiveModel within the prevalent system.
    private Location modelLocation;
    // Button combination to bind.
    private List<Integer> buttons;
    // Index of the tool to bind the combination to.
    private int tool;

    public BindButtonsToToolCommand(Location modelLocation, List<Integer> buttons, int tool) {
        this.modelLocation = modelLocation;
        this.buttons = buttons;
        this.tool = tool;
    }

    @Override
    public void executeOn(PropogationContext propCtx, Model prevalentSystem, Date executionTime, TranscriberBranch<Model> branch) {
        // Resolve the target model at execution time so the command stays replayable.
        LiveModel liveModel = (LiveModel)modelLocation.getChild(prevalentSystem);
        liveModel.bindButtonsToTool(buttons, tool, propCtx, 0, branch);
    }
}
/**
 * Replayable command that removes a button-combination-to-tool binding on the
 * {@link LiveModel} found at {@code modelLocation}; inverse of
 * {@link BindButtonsToToolCommand}.
 */
public static class RemoveButtonsToToolBindingCommand implements Command<Model> {
    /** Serialization version for the command's persisted form. */
    private static final long serialVersionUID = 1L;
    // Location of the target LiveModel within the prevalent system.
    private Location modelLocation;
    // Button combination whose binding is to be removed.
    private List<Integer> buttons;
    // Index of the tool the combination was bound to.
    private int tool;

    public RemoveButtonsToToolBindingCommand(Location modelLocation, List<Integer> buttons, int tool) {
        this.modelLocation = modelLocation;
        this.buttons = buttons;
        this.tool = tool;
    }

    @Override
    public void executeOn(PropogationContext propCtx, Model prevalentSystem, Date executionTime, TranscriberBranch<Model> branch) {
        // Resolve the target model at execution time so the command stays replayable.
        LiveModel liveModel = (LiveModel)modelLocation.getChild(prevalentSystem);
        liveModel.removeButtonsToToolBinding(buttons, tool, propCtx, 0, branch);
    }
}
/** Locator that resolves to a LiveModel's wrapped content model. */
public static class ContentLocator implements dynamake.models.ModelLocator {
    @Override
    public ModelLocation locate() {
        return new FieldContentLocation();
    }
}
/**
 * Model-side location pointing at the {@code content} field of a
 * {@link LiveModel}; its view-side counterpart is {@link ViewFieldContentLocation}.
 */
private static class FieldContentLocation implements ModelLocation {
    /** Serialization version for the persisted location. */
    private static final long serialVersionUID = 1L;

    @Override
    public Object getChild(Object holder) {
        return ((LiveModel)holder).content;
    }

    @Override
    public Location getModelComponentLocation() {
        return new ViewFieldContentLocation();
    }
}
/** View-side location resolving to the content view hosted inside a {@link LivePanel}. */
private static class ViewFieldContentLocation implements Location {
    @Override
    public Object getChild(Object holder) {
        return ((LivePanel)holder).contentView.getBindingTarget();
    }
}
private static final int BUTTON_FONT_SIZE = 13;
private static final Color TOP_BACKGROUND_COLOR = new Color(90, 90, 90);
private static final Color TOP_BUTTON_BACKGROUND_COLOR = TOP_BACKGROUND_COLOR;
private static final Color TOP_FOREGROUND_COLOR = Color.WHITE;
public static class ToolButton extends JPanel {
/**
*
*/
private static final long serialVersionUID = 1L;
private int tool;
// private int button;
private List<Integer> buttons;
private String text;
private LiveModel liveModel;
private ModelTranscriber modelTranscriber;
private JLabel labelToolName;
private JPanel panelButtons;
public ToolButton(int tool, List<Integer> buttons, String text, LiveModel liveModel, ModelTranscriber modelTranscriber) {
this.tool = tool;
this.buttons = buttons;
this.text = text;
this.liveModel = liveModel;
this.modelTranscriber = modelTranscriber;
setLayout(new BorderLayout(0, 0));
setBackground(TOP_BUTTON_BACKGROUND_COLOR);
labelToolName = new JLabel();
labelToolName.setHorizontalAlignment(SwingConstants.CENTER);
labelToolName.setForeground(TOP_FOREGROUND_COLOR);
labelToolName.setFont(new Font(labelToolName.getFont().getFontName(), Font.BOLD, BUTTON_FONT_SIZE));
add(labelToolName, BorderLayout.CENTER);
panelButtons = new JPanel();
panelButtons.setLayout(new FlowLayout(FlowLayout.CENTER, 2, 0));
panelButtons.setOpaque(false);
add(panelButtons, BorderLayout.NORTH);
this.setPreferredSize(new Dimension(72, 45));
update();
this.addMouseListener(new MouseAdapter() {
int buttonsDown = 0;
ArrayList<Integer> buttonsPressed = new ArrayList<Integer>();
@Override
public void mousePressed(MouseEvent e) {
int newButton = e.getButton();
buttonsDown++;
buttonsPressed.add(newButton);
Collections.sort(buttonsPressed);
if(buttonsDown == 1) {
setBackground(TOP_BUTTON_BACKGROUND_COLOR.brighter());
}
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
System.out.println("Update");
update(buttonsPressed);
ToolButton.this.repaint();
}
});
}
@Override
public void mouseReleased(MouseEvent e) {
buttonsDown--;
if(buttonsDown == 0) {
setBackground(TOP_BUTTON_BACKGROUND_COLOR);
final int newButton = buttonsPressed.get(0);
PropogationContext propCtx = new PropogationContext();
TranscriberBranch<Model> branch = ToolButton.this.modelTranscriber.createBranch();
final ArrayList<Integer> localButtonsPressed = (ArrayList<Integer>)buttonsPressed.clone();
branch.execute(propCtx, new DualCommandFactory<Model>() {
@Override
public void createDualCommands(List<DualCommand<Model>> dualCommands) {
// setBackground(TOP_BUTTON_BACKGROUND_COLOR.brighter());
List<Integer> currentButtons = ToolButton.this.buttons;
Location modelLocation = ToolButton.this.modelTranscriber.getModelLocation();
// int previousToolForNewButton = ToolButton.this.liveModel.getToolForButton(newButton);
//
// if(previousToolForNewButton != -1) {
// // If the new button is associated to another tool, then remove that binding
// dualCommands.add(new DualCommandPair<Model>(
// new RemoveButtonToToolBindingCommand(modelLocation, newButton, previousToolForNewButton),
// new BindButtonToToolCommand(modelLocation, newButton, previousToolForNewButton))
// );
// }
//
// if(currentButton != -1) {
// // If this tool is associated to button, then remove that binding before
// dualCommands.add(new DualCommandPair<Model>(
// new RemoveButtonToToolBindingCommand(modelLocation, currentButton, ToolButton.this.tool),
// new BindButtonToToolCommand(modelLocation, currentButton, ToolButton.this.tool))
// );
//
// // adding the replacement binding
// dualCommands.add(new DualCommandPair<Model>(
// new BindButtonToToolCommand(modelLocation, newButton, ToolButton.this.tool),
// new RemoveButtonToToolBindingCommand(modelLocation, newButton, ToolButton.this.tool))
// );
// } else {
// dualCommands.add(new DualCommandPair<Model>(
// new BindButtonToToolCommand(modelLocation, newButton, ToolButton.this.tool),
// new RemoveButtonToToolBindingCommand(modelLocation, newButton, ToolButton.this.tool)
// ));
// }
int previousToolForNewButton = ToolButton.this.liveModel.getToolForButtons(localButtonsPressed);
if(previousToolForNewButton != -1) {
// If the new button is associated to another tool, then remove that binding
dualCommands.add(new DualCommandPair<Model>(
new RemoveButtonsToToolBindingCommand(modelLocation, localButtonsPressed, previousToolForNewButton),
new BindButtonsToToolCommand(modelLocation, localButtonsPressed, previousToolForNewButton))
);
}
if(currentButtons.size() > 0) {
// If this tool is associated to button, then remove that binding before
dualCommands.add(new DualCommandPair<Model>(
new RemoveButtonsToToolBindingCommand(modelLocation, currentButtons, ToolButton.this.tool),
new BindButtonsToToolCommand(modelLocation, currentButtons, ToolButton.this.tool))
);
// adding the replacement binding
dualCommands.add(new DualCommandPair<Model>(
new BindButtonsToToolCommand(modelLocation, localButtonsPressed, ToolButton.this.tool),
new RemoveButtonsToToolBindingCommand(modelLocation, localButtonsPressed, ToolButton.this.tool))
);
} else {
dualCommands.add(new DualCommandPair<Model>(
new BindButtonsToToolCommand(modelLocation, localButtonsPressed, ToolButton.this.tool),
new RemoveButtonsToToolBindingCommand(modelLocation, localButtonsPressed, ToolButton.this.tool)
));
}
}
});
branch.close();
buttonsPressed.clear();
}
}
});
// // Support for binding a key combination with a tool
// // It should be possible to both bind a key combination AND a mouse button to the same tool at the same time
// KeyListener keyListener = new KeyAdapter() {
// @Override
// public void keyPressed(KeyEvent e) {
//// System.out.println(e.isControlDown() + ":" + e.getKeyCode());
// }
//
// @Override
// public void keyTyped(KeyEvent e) {
//// System.out.println(e.isControlDown() + ":" + e.getKeyCode());
// }
// };
setFocusable(true);
// this.addKeyListener(keyListener);
// labelToolName.addKeyListener(keyListener);
// labelButton.addKeyListener(keyListener);
}
private static final Color[] BUTTON_COLORS = new Color[] {
new Color(255, 120, 10),//Color.RED,
new Color(10, 220, 10), //Color.GREEN,
new Color(10, 10, 220), //Color.BLUE,
new Color(10, 220, 220), //Color.CYAN,
new Color(220, 220, 10), //Color.ORANGE
new Color(220, 10, 220),
};
/**
 * Maps a 1-based mouse button number to the colour used to render its label.
 *
 * @param button the mouse button number, starting at 1
 * @return the display colour for that button
 * @throws IllegalArgumentException if no colour is assigned for the button
 */
public static Color getColorForButton(int button) {
    // Guard the 1-based index explicitly: button 0 or an exotic high button
    // number previously surfaced as a bare ArrayIndexOutOfBoundsException.
    if (button < 1 || button > BUTTON_COLORS.length) {
        throw new IllegalArgumentException("No color assigned for mouse button " + button);
    }
    return BUTTON_COLORS[button - 1];
}
/**
 * Rebuilds this tool button's display: the tool name plus one coloured label
 * per bound mouse button. A non-empty binding is shown "pressed" (lowered
 * bevel); an unbound tool is shown raised with a placeholder label.
 */
private void update(List<Integer> buttons) {
    labelToolName.setText(text);

    Border innerBorder = BorderFactory.createEmptyBorder(0, 5, 0, 5);
    Border outerBorder;

    // Rebuild the button-number labels from scratch on every update.
    panelButtons.removeAll();
    if(buttons.size() > 0) {
        for(int button: buttons) {
            JLabel buttonLabel = new JLabel();
            buttonLabel.setHorizontalAlignment(SwingConstants.CENTER);
            buttonLabel.setForeground(getColorForButton(button));
            buttonLabel.setFont(new Font(buttonLabel.getFont().getFontName(), Font.ITALIC | Font.BOLD, 16));
            buttonLabel.setText("" + button);
            panelButtons.add(buttonLabel);
        }
        panelButtons.revalidate();
        outerBorder = BorderFactory.createLoweredSoftBevelBorder();
    } else {
        // Placeholder keeps the layout height stable when no button is bound.
        // NOTE(review): revalidate() is only called in the non-empty branch —
        // confirm whether the empty branch relies on a later repaint.
        JLabel buttonLabel = new JLabel();
        buttonLabel.setText(" ");
        buttonLabel.setForeground(null);
        panelButtons.add(buttonLabel);
        outerBorder = BorderFactory.createRaisedSoftBevelBorder();
    }

    setBorder(BorderFactory.createCompoundBorder(outerBorder, innerBorder));
}
/** Refreshes the display from the currently stored button binding. */
private void update() {
    // update(button != -1 ? Arrays.asList(button) : Collections.<Integer>emptyList());
    update(buttons);
}
/** Replaces the displayed button binding and refreshes the component. */
public void setButtons(List<Integer> buttons) {
    this.buttons = buttons;
    update();
}
}
/**
 * Creates the toolbar component for one tool.
 * NOTE(review): the {@code group} parameter is accepted but never used — confirm
 * whether mutually-exclusive tool selection via ButtonGroup is still intended.
 */
private static JComponent createToolButton(final LiveModel model, final ModelTranscriber modelTranscriber, ButtonGroup group, List<Integer> buttons, final int tool, final String text) {
    return new ToolButton(tool, buttons, text, model, modelTranscriber);
}
/** Pushes a new button binding into an existing {@link ToolButton} component. */
private static void updateToolButton(JComponent toolButton, List<Integer> buttons) {
    ((ToolButton)toolButton).setButtons(buttons);
}
public static class ProductionPanel extends JPanel {
/**
*
*/
private static final long serialVersionUID = 1L;
public static final Color TARGET_OVER_COLOR = new Color(35, 89, 184);
public static final Color BIND_COLOR = new Color(25, 209, 89);
public static final Color UNBIND_COLOR = new Color(240, 34, 54);
public static final Color SELECTION_COLOR = Color.GRAY;
public static class EditPanelMouseAdapter extends MouseAdapter {
public ProductionPanel productionPanel;
public int buttonPressed;
public static final int HORIZONTAL_REGION_WEST = 0;
public static final int HORIZONTAL_REGION_CENTER = 1;
public static final int HORIZONTAL_REGION_EAST = 2;
public static final int VERTICAL_REGION_NORTH = 0;
public static final int VERTICAL_REGION_CENTER = 1;
public static final int VERTICAL_REGION_SOUTH = 2;
public EditPanelMouseAdapter(ProductionPanel productionPanel) {
this.productionPanel = productionPanel;
}
/**
 * Resolves the tool bound to a single mouse button. When the button is
 * unbound, a do-nothing Tool is returned so callers never need a null check.
 */
private Tool getTool(int button) {
    // int toolForButton = productionPanel.livePanel.model.getToolForButton(button);
    // Bindings are keyed by button combinations; a single button is a one-element list.
    int toolForButton = productionPanel.livePanel.model.getToolForButtons(Arrays.asList(button));

    if(toolForButton != -1) {
        return productionPanel.livePanel.viewManager.getTools()[toolForButton];
    } else {
        // Null-object fallback: every event handler is a no-op.
        return new Tool() {
            @Override
            public void mouseReleased(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }

            @Override
            public void mousePressed(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }

            @Override
            public void mouseMoved(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }

            @Override
            public void mouseExited(ProductionPanel productionPanel, MouseEvent e) { }

            @Override
            public void mouseDragged(ProductionPanel productionPanel, MouseEvent e, ModelComponent modelOver) { }

            @Override
            public String getName() { return null; }

            @Override
            public void paint(Graphics g) { }
        };
    }
}
/**
 * Finds the model component under the mouse by converting the event point
 * into the content view's coordinate space and hit-testing its children.
 */
public ModelComponent getModelOver(MouseEvent e) {
    Point pointInContentView = SwingUtilities.convertPoint((JComponent) e.getSource(), e.getPoint(), (JComponent)productionPanel.contentView.getBindingTarget());
    JComponent componentOver = (JComponent)((JComponent)productionPanel.contentView.getBindingTarget()).findComponentAt(pointInContentView);
    // Walk up from the raw Swing component to the nearest ModelComponent.
    return ModelComponent.Util.closestModelComponent(componentOver);
}
/**
 * Records which button went down and dispatches the press to the tool bound
 * to that button, on the transcriber's transient execution context.
 */
public void mousePressed(final MouseEvent e) {
    final ModelComponent modelOver = getModelOver(e);

    // Remember the pressed button so drag/release events use the same tool.
    productionPanel.editPanelMouseAdapter.buttonPressed = e.getButton();
    final int localButtonPressed = productionPanel.editPanelMouseAdapter.buttonPressed;
    // System.out.println("Pressed button " + localButtonPressed);

    productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
        @Override
        public void run() {
            getTool(localButtonPressed).mousePressed(productionPanel, e, modelOver);
        }
    });
}
/**
 * Forwards drag events to the tool bound to the button recorded at press
 * time; ignored when the cursor is over no model component.
 */
public void mouseDragged(final MouseEvent e) {
    final ModelComponent modelOver = getModelOver(e);

    if(modelOver != null) {
        final int localButtonPressed = productionPanel.editPanelMouseAdapter.buttonPressed;
        // System.out.println("Dragged button " + localButtonPressed);

        productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
            @Override
            public void run() {
                getTool(localButtonPressed).mouseDragged(productionPanel, e, modelOver);
            }
        });
    }
}
/**
 * Forwards the release to the tool bound to the pressed button, then clears
 * the recorded button; ignored when the cursor is over no model component.
 */
public void mouseReleased(final MouseEvent e) {
    final ModelComponent modelOver = getModelOver(e);

    if(modelOver != null) {
        // Capture, then reset, the recorded button before dispatching.
        final int localButtonPressed = productionPanel.editPanelMouseAdapter.buttonPressed;
        productionPanel.editPanelMouseAdapter.buttonPressed = 0;
        // System.out.println("Released button " + localButtonPressed);

        productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
            @Override
            public void run() {
                getTool(localButtonPressed).mouseReleased(productionPanel, e, modelOver);
            }
        });
    }
}
/**
 * Forwards hover movement to the tool bound to button 1, which acts as the
 * "master" tool for passive (no-button) mouse motion.
 */
@Override
public void mouseMoved(final MouseEvent e) {
    final ModelComponent modelOver = getModelOver(e);

    if(modelOver != null) {
        productionPanel.livePanel.getModelTranscriber().executeTransient(new Runnable() {
            @Override
            public void run() {
                // The tool associated to button 1 is used as a "master" tool
                int button = 1;
                getTool(button).mouseMoved(productionPanel, e, modelOver);
            }
        });
    }
}
}
public LivePanel livePanel;
public Binding<ModelComponent> contentView;
public EditPanelMouseAdapter editPanelMouseAdapter;
public ProductionPanel(final LivePanel livePanel, final Binding<ModelComponent> contentView) {
this.setLayout(null);
this.livePanel = livePanel;
this.contentView = contentView;
// TODO: Consider the following:
// For a selected frame, it should be possible to scroll upwards to select its immediate parent
// - and scroll downwards to select its root parents
editPanelMouseAdapter = new EditPanelMouseAdapter(this);
this.addMouseListener(editPanelMouseAdapter);
this.addMouseMotionListener(editPanelMouseAdapter);
this.setOpaque(true);
this.setBackground(new Color(0, 0, 0, 0));
}
/**
 * Paints the panel and lets the currently active tool draw its overlay.
 * NOTE(review): when no button is pressed, buttonPressed is 0 and getTool(0)
 * presumably resolves to the no-op tool — confirm against the binding map.
 */
@Override
public void paint(Graphics g) {
    super.paint(g);

    editPanelMouseAdapter.getTool(editPanelMouseAdapter.buttonPressed).paint(g);
}
}
public static class LivePanel extends JPanel implements ModelComponent {
/**
*
*/
private static final long serialVersionUID = 1L;
public LiveModel model;
private JPanel topPanel;
private JLayeredPane contentPane;
private RemovableListener removableListener;
public ProductionPanel productionPanel;
public ViewManager viewManager;
private ModelTranscriber modelTranscriber;
private JComponent[] buttonTools;
private final Binding<ModelComponent> contentView;
public LivePanel(final ModelComponent rootView, LiveModel model, ModelTranscriber modelTranscriber, final ViewManager viewManager) {
this.setLayout(new BorderLayout());
this.model = model;
this.viewManager = viewManager;
this.modelTranscriber = modelTranscriber;
ViewManager newViewManager = new ViewManager() {
@Override
public Factory[] getFactories() {
return viewManager.getFactories();
}
@Override
public Tool[] getTools() {
return null;
}
};
contentView = model.getContent().createView(rootView, newViewManager, modelTranscriber.extend(new ContentLocator()));
productionPanel = new ProductionPanel(this, contentView);
topPanel = new JPanel();
topPanel.setBackground(TOP_BACKGROUND_COLOR);
topPanel.setBorder(BorderFactory.createLineBorder(Color.BLACK));
contentPane = new JLayeredPane();
productionPanel.setSize(contentPane.getSize().width, contentPane.getSize().height - 1);
contentPane.addComponentListener(new ComponentListener() {
@Override
public void componentShown(ComponentEvent e) { }
@Override
public void componentResized(ComponentEvent e) {
((JComponent)contentView.getBindingTarget()).setSize(((JComponent)e.getSource()).getSize());
if(productionPanel != null) {
productionPanel.setSize(((JComponent)e.getSource()).getSize().width, ((JComponent)e.getSource()).getSize().height - 1);
}
}
@Override
public void componentMoved(ComponentEvent e) { }
@Override
public void componentHidden(ComponentEvent e) { }
});
contentPane.add((JComponent)contentView.getBindingTarget(), JLayeredPane.DEFAULT_LAYER);
this.add(topPanel, BorderLayout.NORTH);
this.add(contentPane, BorderLayout.CENTER);
removableListener = Model.RemovableListener.addObserver(model, new ObserverAdapter() {
{
initializeObserverAdapter();
}
private void initializeObserverAdapter() {
}
@Override
public void changed(Model sender, Object change, final PropogationContext propCtx, int propDistance, int changeDistance, TranscriberBranch<Model> branch) {
// if(change instanceof LiveModel.ButtonToolBindingChanged) {
// LiveModel.ButtonToolBindingChanged bindButtonChanged = (LiveModel.ButtonToolBindingChanged)change;
//
// if(bindButtonChanged.tool != -1) {
// JComponent buttonNewTool = buttonTools[bindButtonChanged.tool];
// updateToolButton(buttonNewTool, bindButtonChanged.button);
// }
// }
if(change instanceof LiveModel.ButtonsToolBindingChanged) {
LiveModel.ButtonsToolBindingChanged bindButtonChanged = (LiveModel.ButtonsToolBindingChanged)change;
if(bindButtonChanged.tool != -1) {
JComponent buttonNewTool = buttonTools[bindButtonChanged.tool];
updateToolButton(buttonNewTool, bindButtonChanged.buttons);
}
}
}
});
contentPane.add(productionPanel, JLayeredPane.MODAL_LAYER);
}
@Override
public void initialize() {
Tool[] tools = viewManager.getTools();
buttonTools = new JComponent[tools.length];
ButtonGroup group = new ButtonGroup();
for(int i = 0; i < tools.length; i++) {
Tool tool = tools[i];
// int button = model.getButtonForTool(i);
// buttonTools[i] = createToolButton(model, modelTranscriber, group, button, i, tool.getName());
List<Integer> buttons = model.getButtonsForTool(i);
buttonTools[i] = createToolButton(model, modelTranscriber, group, buttons, i, tool.getName());
}
for(JComponent buttonTool: buttonTools) {
topPanel.add(buttonTool);
}
TranscriberBranch<Model> initializationBranch = getModelTranscriber().createBranch();
initializationBranch.setOnFinishedBuilder(new RepaintRunBuilder(productionPanel.livePanel));
initializationBranch.close();
}
public Factory[] getFactories() {
return viewManager.getFactories();
}
@Override
public Model getModelBehind() {
return model;
}
@Override
public void appendContainerTransactions(LivePanel livePanel, CompositeMenuBuilder menuBuilder, ModelComponent child, TranscriberBranch<Model> branch) {
}
@Override
public void appendTransactions(ModelComponent livePanel, CompositeMenuBuilder menuBuilder, TranscriberBranch<Model> branch) {
}
@Override
public void appendDroppedTransactions(ModelComponent livePanel, ModelComponent target, Rectangle droppedBounds, CompositeMenuBuilder menuBuilder, TranscriberBranch<Model> branch) {
Model.appendGeneralDroppedTransactions(livePanel, this, target, droppedBounds, menuBuilder, branch);
}
@Override
public void appendDropTargetTransactions(
ModelComponent livePanel, ModelComponent dropped, Rectangle droppedBounds, Point dropPoint, CompositeMenuBuilder menuBuilder, TranscriberBranch<Model> branch) {
}
@Override
public ModelTranscriber getModelTranscriber() {
return modelTranscriber;
}
public void releaseBinding() {
removableListener.releaseBinding();
}
@Override
public DualCommandFactory<Model> getImplicitDropAction(ModelComponent target) {
return null;
}
@Override
public void visitTree(Action1<ModelComponent> visitAction) {
visitAction.run(this);
}
}
@Override
public Binding<ModelComponent> createView(ModelComponent rootView, ViewManager viewManager, ModelTranscriber modelTranscriber) {
this.setLocation(modelTranscriber.getModelLocator());
final LivePanel view = new LivePanel(rootView, this, modelTranscriber, viewManager);
return new Binding<ModelComponent>() {
@Override
public void releaseBinding() {
view.releaseBinding();
}
@Override
public ModelComponent getBindingTarget() {
return view;
}
};
}
public Model getContent() {
return content;
}
}
| Did a bit of cleaning up in live model.
Signed-off-by: Jakob Brandsborg Ehmsen <[email protected]> | eclipse/src/dynamake/models/LiveModel.java | Did a bit of cleaning up in live model. |
|
Java | mit | f5b8f15140cf8647ea8e18e48c8a31b3613e5941 | 0 | michaelahlers/embedded-phantom | package ahlers.phantom.embedded;
import de.flapdoodle.embed.process.config.ExecutableProcessConfig;
import de.flapdoodle.embed.process.config.ISupportConfig;
import de.flapdoodle.embed.process.distribution.IVersion;
/**
* Command line options for PhantomJS.
*
* @author [[mailto:[email protected] Michael Ahlers]]
* @see <a href="http://phantomjs.org/api/command-line.html"><em>Command Line Interface</em></a>
*/
public class PhantomConfig
extends ExecutableProcessConfig {

    // When non-null, carried as the PhantomJS debug option; null means "unset".
    private final Boolean debug;

    /**
     * Creates a configuration for the given PhantomJS version.
     *
     * @param version the PhantomJS distribution version to run
     * @param debug   debug flag, or {@code null} to leave it unspecified
     */
    protected PhantomConfig(
            final IVersion version,
            final Boolean debug
    ) {
        // The support config identifies the process type and the issue tracker
        // users should report embedded-launcher problems to.
        super(version, new ISupportConfig() {
            @Override
            public String getName() {
                return "phantomjs";
            }

            @Override
            public String getSupportUrl() {
                return "https://github.com/michaelahlers/embedded-phantom/issues";
            }

            @Override
            public String messageOnException(final Class<?> context, final Exception exception) {
                return "";
            }
        });

        this.debug = debug;
    }

    /** @return the debug flag, or {@code null} if it was never set */
    public Boolean getDebug() {
        return debug;
    }

    /**
     * @param version the PhantomJS version the resulting config will target
     * @return a fresh builder for that version
     */
    public static Builder builder(final PhantomVersion version) {
        return new Builder(version);
    }

    /** @return a builder targeting {@code PhantomVersion.DEFAULT} */
    public static Builder builder() {
        return builder(PhantomVersion.DEFAULT);
    }

    /** Mutable builder for {@link PhantomConfig} instances. */
    public static class Builder {

        private final IVersion version;

        private Boolean debug;

        public Builder(final IVersion version) {
            this.version = version;
        }

        // NOTE(review): returns void, so calls cannot be chained fluently —
        // confirm whether returning the builder is desired before changing it.
        public void withDebug(final Boolean debug) {
            this.debug = debug;
        }

        /** @return an immutable config built from the collected settings */
        public PhantomConfig build() {
            return new PhantomConfig(version, debug);
        }
    }
}
| src/main/java/ahlers/phantom/embedded/PhantomConfig.java | package ahlers.phantom.embedded;
import de.flapdoodle.embed.process.config.ExecutableProcessConfig;
import de.flapdoodle.embed.process.config.ISupportConfig;
import de.flapdoodle.embed.process.distribution.IVersion;
/**
* Command line options for PhantomJS.
*
* @author [[mailto:[email protected] Michael Ahlers]]
* @see <a href="http://phantomjs.org/api/command-line.html"><em>Command Line Interface</em></a>
*/
public class PhantomConfig
extends ExecutableProcessConfig {
private final Boolean debug;
protected PhantomConfig(
final IVersion version,
final Boolean debug
) {
super(version, new ISupportConfig() {
@Override
public String getName() {
return "phantomjs";
}
@Override
public String getSupportUrl() {
return "https://github.com/michaelahlers/embedded-phantom/issues";
}
@Override
public String messageOnException(final Class<?> context, final Exception exception) {
return "";
}
});
this.debug = debug;
}
public Boolean getDebug() {
return debug;
}
public static Builder defaultConfig(final PhantomVersion version) {
return new Builder(version);
}
public static Builder defaultConfig() {
return defaultConfig(PhantomVersion.DEFAULT);
}
public static class Builder {
private final IVersion version;
private Boolean debug;
public Builder(final IVersion version) {
this.version = version;
}
public void withDebug(final Boolean debug) {
this.debug = debug;
}
public PhantomConfig build() {
return new PhantomConfig(version, debug);
}
}
}
| Rename builder factory
| src/main/java/ahlers/phantom/embedded/PhantomConfig.java | Rename builder factory |
|
Java | mit | 0a53ae4deb4ad428b70fc1c7666f6e1488eef81d | 0 | sfrink/CERTUS-Common | package rmi;
import java.io.InputStream;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.util.ArrayList;
import dto.CandidateDto;
import dto.ElectionDto;
import dto.UserDto;
import dto.Validator;
import dto.VoteDto;
import enumeration.Status;
import enumeration.ElectionStatus;
import enumeration.UserStatus;
public interface ServerInterface extends Remote {
public String sayHello(String name) throws RemoteException;
public Validator checkIfUsernamePasswordMatch(String email, String plainPass)
throws RemoteException;
// Users
public Validator addUser (UserDto userDto) throws RemoteException;
public Validator addUserWithPP (UserDto userDto) throws RemoteException;
public Validator addUserWithKey (UserDto userDto) throws RemoteException;
public Validator updateTempUser (UserDto userDto, String tempPassword, String sessionID) throws RemoteException;
public Validator UpdateTempUserWithPP (UserDto userDto, String tempPassword, String sessionID) throws RemoteException;
public Validator UpdateTempUserWithKey (UserDto userDto, String tempPassword, String sessionID) throws RemoteException;
public Validator selectUser(int userId, String sessionID) throws RemoteException;
public Validator selectAllUsers(String sessionID) throws RemoteException ;
public Validator editUser(UserDto userDto, String sessionID) throws RemoteException ;
public Validator editUserStatus(int userId, UserStatus userStatus, String sessionID) throws RemoteException;
public Validator generateNewKeys(int userID, String newKeyPass, String sessionID) throws RemoteException;
public Validator logOut(String sessionID) throws RemoteException;
public Validator updateUser(UserDto userDto, String sessionID) throws RemoteException;
public Validator updateUserPassword(UserDto userDto, String sessionID) throws RemoteException;
public Validator uploadPubKey(byte[] keyBytes, String sessionID) throws RemoteException;
public Validator checkIfUsernameTempPasswordMatch(String email, String plainPass)
throws RemoteException;
// Elections
public Validator selectElectionForVoter(int electionId, String sessionID) throws RemoteException;
public Validator selectElectionsForVoter(int user_id, String sessionID) throws RemoteException;
public Validator selectElectionForOwner(int electionId, String sessionID) throws RemoteException;
public Validator selectElectionsForOwner(int electionOwnerId, String sessionID) throws RemoteException;
public Validator selectElectionFullDetail (int electionId, String sessionID) throws RemoteException;
public Validator selectElectionsForAdmin(String sessionID) throws RemoteException;
public Validator selectElectionsForResults(int userId, String sessionID) throws RemoteException;
public Validator addElection(ElectionDto electionDto, String sessionID)throws RemoteException;
public Validator addAdditionalUsersToElection(ElectionDto electionDto, String sessionID) throws RemoteException;
public Validator addUserInvitations(ElectionDto electionDto, String sessionID) throws RemoteException ;
public Validator editElection(ElectionDto electionDto, String sessionID)throws RemoteException;
public Validator editElectionStatus(int electionId, ElectionStatus electionStatus, String sessionID) throws RemoteException;
public Validator openElectionAndPopulateCandidates(int electionId, String sessionID) throws RemoteException;
public Validator publishResults(int electionId, String password, String sessionID) throws RemoteException;
public Validator deleteElection(int electionId, String sessionID) throws RemoteException;
//Vote
public Validator vote(VoteDto v, String sessionID) throws RemoteException;
public Validator getTallierPublicKey(int electionId, String sessionID) throws RemoteException;
public Validator voteProgressStatusForElection(int electionId, String sessionID) throws RemoteException;
public Validator selectResults(int electionId, String sessionID) throws RemoteException;
public Validator sendTempPassword(UserDto u, String sessionID) throws RemoteException;
}
| src/rmi/ServerInterface.java | package rmi;
import java.io.InputStream;
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.util.ArrayList;
import dto.CandidateDto;
import dto.ElectionDto;
import dto.UserDto;
import dto.Validator;
import dto.VoteDto;
import enumeration.Status;
import enumeration.ElectionStatus;
import enumeration.UserStatus;
public interface ServerInterface extends Remote {
public String sayHello(String name) throws RemoteException;
public Validator checkIfUsernamePasswordMatch(String email, String plainPass)
throws RemoteException;
// Users
public Validator addUser (UserDto userDto) throws RemoteException;
public Validator addUserWithPP (UserDto userDto) throws RemoteException;
public Validator addUserWithKey (UserDto userDto) throws RemoteException;
public Validator selectUser(int userId, String sessionID) throws RemoteException;
public Validator selectAllUsers(String sessionID) throws RemoteException ;
public Validator editUser(UserDto userDto, String sessionID) throws RemoteException ;
public Validator editUserStatus(int userId, UserStatus userStatus, String sessionID) throws RemoteException;
public Validator generateNewKeys(int userID, String newKeyPass, String sessionID) throws RemoteException;
public Validator logOut(String sessionID) throws RemoteException;
public Validator updateUser(UserDto userDto, String sessionID) throws RemoteException;
public Validator updateUserPassword(UserDto userDto, String sessionID) throws RemoteException;
public Validator uploadPubKey(byte[] keyBytes, String sessionID) throws RemoteException;
public Validator checkIfUsernameTempPasswordMatch(String email, String plainPass)
throws RemoteException;
// Elections
public Validator selectElectionForVoter(int electionId, String sessionID) throws RemoteException;
public Validator selectElectionsForVoter(int user_id, String sessionID) throws RemoteException;
public Validator selectElectionForOwner(int electionId, String sessionID) throws RemoteException;
public Validator selectElectionsForOwner(int electionOwnerId, String sessionID) throws RemoteException;
public Validator selectElectionFullDetail (int electionId, String sessionID) throws RemoteException;
public Validator selectElectionsForAdmin(String sessionID) throws RemoteException;
public Validator selectElectionsForResults(int userId, String sessionID) throws RemoteException;
public Validator addElection(ElectionDto electionDto, String sessionID)throws RemoteException;
public Validator addAdditionalUsersToElection(ElectionDto electionDto, String sessionID) throws RemoteException;
public Validator addUserInvitations(ElectionDto electionDto, String sessionID) throws RemoteException ;
public Validator editElection(ElectionDto electionDto, String sessionID)throws RemoteException;
public Validator editElectionStatus(int electionId, ElectionStatus electionStatus, String sessionID) throws RemoteException;
public Validator openElectionAndPopulateCandidates(int electionId, String sessionID) throws RemoteException;
public Validator publishResults(int electionId, String password, String sessionID) throws RemoteException;
public Validator deleteElection(int electionId, String sessionID) throws RemoteException;
//Vote
public Validator vote(VoteDto v, String sessionID) throws RemoteException;
public Validator getTallierPublicKey(int electionId, String sessionID) throws RemoteException;
public Validator voteProgressStatusForElection(int electionId, String sessionID) throws RemoteException;
public Validator selectResults(int electionId, String sessionID) throws RemoteException;
public Validator sendTempPassword(UserDto u, String sessionID) throws RemoteException;
}
| Invited User Story & other fixes | src/rmi/ServerInterface.java | Invited User Story & other fixes |
|
Java | mit | fdf5e0f91bb753f05fb88c6332455770b1ddd318 | 0 | auth0/Auth0.Android,auth0/Auth0.Android | package com.auth0.android.provider;
import android.app.Activity;
import android.app.Dialog;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.test.espresso.intent.matcher.IntentMatchers;
import android.util.Base64;
import android.webkit.URLUtil;
import com.auth0.android.Auth0;
import com.auth0.android.Auth0Exception;
import com.auth0.android.authentication.AuthenticationException;
import com.auth0.android.result.Credentials;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.hamcrest.core.Is;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import java.io.UnsupportedEncodingException;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static android.support.test.espresso.intent.matcher.IntentMatchers.hasComponent;
import static android.support.test.espresso.intent.matcher.IntentMatchers.hasFlag;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasHost;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasParamWithName;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasParamWithValue;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasPath;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasScheme;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@RunWith(RobolectricTestRunner.class)
@Config(sdk = 18)
public class WebAuthProviderTest {
private static final int REQUEST_CODE = 11;
private static final String KEY_STATE = "state";
private static final String KEY_NONCE = "nonce";
private static final long CURRENT_TIME_MS = 1234567890000L;
@Mock
private AuthCallback callback;
@Mock
private VoidCallback voidCallback;
private Activity activity;
private Auth0 account;
@Captor
private ArgumentCaptor<Auth0Exception> auth0ExceptionCaptor;
@Captor
private ArgumentCaptor<AuthenticationException> authExceptionCaptor;
@Captor
private ArgumentCaptor<Intent> intentCaptor;
@Captor
private ArgumentCaptor<AuthCallback> callbackCaptor;
@Captor
private ArgumentCaptor<VoidCallback> voidCallbackCaptor;
@Before
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
activity = spy(Robolectric.buildActivity(Activity.class).get());
account = new Auth0("clientId", "domain");
//Next line is needed to avoid CustomTabService from being bound to Test environment
//noinspection WrongConstant
doReturn(false).when(activity).bindService(any(Intent.class), any(ServiceConnection.class), anyInt());
//Next line is needed to tell a Browser app is installed
prepareBrowserApp(true, null);
}
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
//** LOG IN FEATURE **//
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
@SuppressWarnings("deprecation")
@Test
public void shouldLoginWithAccount() throws Exception {
WebAuthProvider.login(account)
.start(activity, callback, REQUEST_CODE);
assertNotNull(WebAuthProvider.getManagerInstance());
}
@SuppressWarnings("deprecation")
@Test
public void shouldInitWithAccount() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback, REQUEST_CODE);
assertNotNull(WebAuthProvider.getManagerInstance());
}
@Test
public void shouldInitWithContext() throws Exception {
Context context = Mockito.mock(Context.class);
Resources resources = Mockito.mock(Resources.class);
when(context.getResources()).thenReturn(resources);
when(resources.getIdentifier(eq("com_auth0_client_id"), eq("string"), anyString())).thenReturn(222);
when(resources.getIdentifier(eq("com_auth0_domain"), eq("string"), anyString())).thenReturn(333);
when(context.getString(eq(222))).thenReturn("clientId");
when(context.getString(eq(333))).thenReturn("domain");
WebAuthProvider.init(context)
.start(activity, callback);
assertNotNull(WebAuthProvider.getManagerInstance());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotResumeLoginWithRequestCodeWhenNotInit() throws Exception {
Intent intentMock = Mockito.mock(Intent.class);
assertFalse(WebAuthProvider.resume(0, 0, intentMock));
}
@Test
public void shouldNotResumeLoginWithIntentWhenNotInit() throws Exception {
Intent intentMock = Mockito.mock(Intent.class);
assertFalse(WebAuthProvider.resume(0, 0, intentMock));
}
@Test
public void shouldNotResumeWithIntentWhenNotInit() throws Exception {
Intent intentMock = Mockito.mock(Intent.class);
assertFalse(WebAuthProvider.resume(intentMock));
}
//scheme
@Test
public void shouldHaveDefaultSchemeOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithName("redirect_uri"));
Uri redirectUri = Uri.parse(uri.getQueryParameter("redirect_uri"));
assertThat(redirectUri, hasScheme("https"));
}
@Test
public void shouldSetSchemeOnLogin() throws Exception {
WebAuthProvider.init(account)
.withScheme("myapp")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithName("redirect_uri"));
Uri redirectUri = Uri.parse(uri.getQueryParameter("redirect_uri"));
assertThat(redirectUri, hasScheme("myapp"));
}
//connection
@Test
public void shouldNotHaveDefaultConnectionOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("connection")));
}
@Test
public void shouldSetConnectionFromParametersOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection", (Object) "my-connection");
WebAuthProvider.init(account)
.withConnection("some-connection")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "my-connection"));
}
@Test
public void shouldSetConnectionFromSetterOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection", (Object) "my-connection");
WebAuthProvider.init(account)
.withParameters(parameters)
.withConnection("some-connection")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "some-connection"));
}
@Test
public void shouldNotOverrideConnectionValueWithDefaultConnectionOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection", (Object) "my-connection");
WebAuthProvider.init(account)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "my-connection"));
}
@Test
public void shouldSetConnectionOnLogin() throws Exception {
WebAuthProvider.init(account)
.withConnection("some-connection")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "some-connection"));
}
//audience
@Test
public void shouldNotHaveDefaultAudienceOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("audience")));
}
@Test
public void shouldSetAudienceFromParametersOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("audience", (Object) "https://mydomain.auth0.com/myapi");
WebAuthProvider.init(account)
.withAudience("https://google.com/apis")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("audience", "https://mydomain.auth0.com/myapi"));
}
@Test
public void shouldSetAudienceFromSetterOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("audience", (Object) "https://mydomain.auth0.com/myapi");
WebAuthProvider.init(account)
.withParameters(parameters)
.withAudience("https://google.com/apis")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("audience", "https://google.com/apis"));
}
@Test
public void shouldNotOverrideAudienceValueWithDefaultAudienceOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("audience", (Object) "https://mydomain.auth0.com/myapi");
WebAuthProvider.init(account)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("audience", "https://mydomain.auth0.com/myapi"));
}
@Test
public void shouldSetAudienceOnLogin() throws Exception {
WebAuthProvider.init(account)
.withAudience("https://google.com/apis")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("audience", "https://google.com/apis"));
}
//scope
@Test
public void shouldHaveDefaultScopeOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("scope", "openid"));
}
@Test
public void shouldSetScopeFromParametersOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("scope", (Object) "openid email contacts");
WebAuthProvider.init(account)
.withScope("profile super_scope")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("scope", "openid email contacts"));
}
@Test
public void shouldSetScopeFromSetterOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("scope", (Object) "openid email contacts");
WebAuthProvider.init(account)
.withParameters(parameters)
.withScope("profile super_scope")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("scope", "profile super_scope"));
}
@Test
public void shouldNotOverrideScopeValueWithDefaultScopeOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("scope", (Object) "openid email contacts");
WebAuthProvider.init(account)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("scope", "openid email contacts"));
}
@Test
public void shouldSetScopeOnLogin() throws Exception {
WebAuthProvider.init(account)
.withScope("profile super_scope")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("scope", "profile super_scope"));
}
//connection scope
@Test
public void shouldNotHaveDefaultConnectionScopeOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("connection_scope")));
}
@Test
public void shouldSetConnectionScopeFromParametersOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection_scope", (Object) "openid,email,contacts");
WebAuthProvider.init(account)
.withConnectionScope("profile", "super_scope")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection_scope", "openid,email,contacts"));
}
@Test
public void shouldSetConnectionScopeFromSetterOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection_scope", (Object) "openid,email,contacts");
WebAuthProvider.init(account)
.withParameters(parameters)
.withConnectionScope("profile", "super_scope")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection_scope", "profile,super_scope"));
}
@Test
public void shouldNotOverrideConnectionScopeValueWithDefaultConnectionScopeOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection_scope", (Object) "openid,email,contacts");
WebAuthProvider.init(account)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection_scope", "openid,email,contacts"));
}
@Test
public void shouldSetConnectionScopeOnLogin() throws Exception {
WebAuthProvider.init(account)
.withConnectionScope("the", "scope", "of", "my", "connection")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection_scope", "the,scope,of,my,connection"));
}
//state
@Test
public void shouldHaveDefaultStateOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue(is("state"), not(isEmptyOrNullString())));
}
@Test
public void shouldSetNonNullStateOnLogin() throws Exception {
WebAuthProvider.init(account)
.withState(null)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue(is("state"), not(isEmptyOrNullString())));
}
@Test
public void shouldSetStateFromParametersOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("state", (Object) "1234567890");
WebAuthProvider.init(account)
.withState("abcdefg")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("state", "1234567890"));
}
@Test
public void shouldSetStateFromSetterOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("state", (Object) "1234567890");
WebAuthProvider.init(account)
.withParameters(parameters)
.withState("abcdefg")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("state", "abcdefg"));
}
@Test
public void shouldNotOverrideStateValueWithDefaultStateOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("state", (Object) "1234567890");
WebAuthProvider.init(account)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("state", "1234567890"));
}
@Test
public void shouldSetStateOnLogin() throws Exception {
WebAuthProvider.init(account)
.withState("abcdefg")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("state", "abcdefg"));
}
//nonce
@Test
public void shouldNotSetNonceByDefaultIfResponseTypeIsCodeOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.CODE)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("nonce")));
}
@Test
public void shouldNotSetNonceByDefaultIfResponseTypeIsTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("nonce")));
}
@Test
public void shouldHaveDefaultNonceOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue(is("nonce"), not(isEmptyOrNullString())));
}
@Test
public void shouldSetNonNullNonceOnLogin() throws Exception {
WebAuthProvider.init(account)
.withNonce(null)
.withResponseType(ResponseType.ID_TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue(is("nonce"), not(isEmptyOrNullString())));
}
@Test
public void shouldSetUserNonceIfResponseTypeIsTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.TOKEN)
.withNonce("1234567890")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("nonce", "1234567890"));
}
@Test
public void shouldSetUserNonceIfResponseTypeIsCodeOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.CODE)
.withNonce("1234567890")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("nonce", "1234567890"));
}
@Test
public void shouldSetNonceFromParametersOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("nonce", (Object) "1234567890");
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.withNonce("abcdefg")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("nonce", "1234567890"));
}
// The explicit withNonce(...) setter wins over a "nonce" entry passed via withParameters(...)
// when the setter is applied after the parameters map.
@Test
public void shouldSetNonceFromSetterOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("nonce", (Object) "1234567890");
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.withParameters(parameters)
.withNonce("abcdefg")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("nonce", "abcdefg"));
}
// A nonce supplied only through withParameters(...) is kept as-is; the SDK must not
// replace it with an auto-generated default value.
@Test
public void shouldNotOverrideNonceValueWithDefaultNonceOnLogin() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("nonce", (Object) "1234567890");
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("nonce", "1234567890"));
}
// A nonce set only via the withNonce(...) setter is propagated to the authorize URI.
@Test
public void shouldSetNonceOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.withNonce("abcdefg")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("nonce", "abcdefg"));
}
// getRandomString(null) must produce a fresh non-null value on every call
// (two consecutive calls should never collide).
@Test
public void shouldGenerateRandomStringIfDefaultValueMissingOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
String random1 = OAuthManager.getRandomString(null);
String random2 = OAuthManager.getRandomString(null);
assertThat(random1, is(notNullValue()));
assertThat(random2, is(notNullValue()));
assertThat(random1, is(not(equalTo(random2))));
}
// getRandomString(defaultValue) must return the provided default untouched.
@Test
public void shouldNotGenerateRandomStringIfDefaultValuePresentOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
String random1 = OAuthManager.getRandomString("some");
String random2 = OAuthManager.getRandomString("some");
assertThat(random1, is("some"));
assertThat(random2, is("some"));
}
// ---- Auth0 account-derived parameters (client_id, telemetry, redirect_uri) ----
// The client_id configured on the Auth0 account instance is always sent.
@Test
public void shouldHaveClientIdOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("client_id", "clientId"));
}
// The SDK telemetry header ("auth0Client") is attached as a non-empty query parameter.
@Test
public void shouldHaveTelemetryInfoOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue(is("auth0Client"), not(isEmptyOrNullString())));
}
// The default redirect_uri follows the documented Android callback scheme:
// https://{domain}/android/{packageName}/callback
@Test
public void shouldHaveRedirectUriOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri.getQueryParameter("redirect_uri"), is("https://domain/android/com.auth0.android.auth0.test/callback"));
}
// ---- response_type parameter: single values and every supported bit-flag combination ----
// With no explicit response type, the default is the authorization code grant ("code").
@Test
public void shouldHaveDefaultResponseTypeOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "code"));
}
@Test
public void shouldSetResponseTypeTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "token"));
}
@Test
public void shouldSetResponseTypeIdTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "id_token"));
}
@Test
public void shouldSetResponseTypeCodeOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.CODE)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "code"));
}
// ResponseType values are bit flags; OR-ing them yields a space-separated
// response_type string in a fixed order (code, id_token, token).
@Test
public void shouldSetResponseTypeCodeTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.CODE | ResponseType.TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "code token"));
}
@Test
public void shouldSetResponseTypeCodeIdTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.CODE | ResponseType.ID_TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "code id_token"));
}
@Test
public void shouldSetResponseTypeIdTokenTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN | ResponseType.TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "id_token token"));
}
@Test
public void shouldSetResponseTypeCodeIdTokenTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.CODE | ResponseType.ID_TOKEN | ResponseType.TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("response_type", "code id_token token"));
}
// Entries with a null value in the withParameters(...) map are dropped rather than
// serialized as "null" query parameters.
@Test
public void shouldSetNonNullAuthenticationParametersOnLogin() throws Exception {
Map<String, Object> parameters = new HashMap<>();
parameters.put("a", "valid");
parameters.put("b", null);
WebAuthProvider.init(account)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("a", "valid"));
assertThat(uri, not(hasParamWithName("b")));
}
// Sweep over every query parameter of a default authorize URI: none may carry a
// Java null or the literal string "null".
@Test
public void shouldBuildAuthorizeURIWithoutNullsOnLogin() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
Set<String> params = uri.getQueryParameterNames();
for (String name : params) {
assertThat(uri, not(hasParamWithValue(name, null)));
assertThat(uri, not(hasParamWithValue(name, "null")));
}
}
// The built authorize URI must reuse the scheme, host and path of the account's
// configured authorize URL verbatim.
@Test
public void shouldBuildAuthorizeURIWithCorrectSchemeHostAndPathOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.withState("a-state")
.withNonce("a-nonce")
.start(activity, callback);
Uri baseUriString = Uri.parse(account.getAuthorizeUrl());
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasScheme(baseUriString.getScheme()));
assertThat(uri, hasHost(baseUriString.getHost()));
assertThat(uri, hasPath(baseUriString.getPath()));
}
// response_type=id_token: a nonce is required, and PKCE parameters
// (code_challenge / code_challenge_method) must be absent.
@Test
public void shouldBuildAuthorizeURIWithResponseTypeIdTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.withState("a-state")
.withNonce("a-nonce")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("nonce", "a-nonce"));
assertThat(uri, not(hasParamWithName("code_challenge")));
assertThat(uri, not(hasParamWithName("code_challenge_method")));
assertThat(uri, hasParamWithValue("response_type", "id_token"));
}
// response_type=token (implicit grant): neither nonce nor PKCE parameters are sent.
@Test
public void shouldBuildAuthorizeURIWithResponseTypeTokenOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.TOKEN)
.withState("a-state")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("nonce")));
assertThat(uri, not(hasParamWithName("code_challenge")));
assertThat(uri, not(hasParamWithName("code_challenge_method")));
assertThat(uri, hasParamWithValue("response_type", "token"));
}
// response_type=code: PKCE is mandatory — a non-empty code_challenge with the
// S256 method must be present, and no nonce is sent.
@Test
public void shouldBuildAuthorizeURIWithResponseTypeCodeOnLogin() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.CODE)
.withState("a-state")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("nonce")));
assertThat(uri, hasParamWithValue(is("code_challenge"), not(isEmptyOrNullString())));
assertThat(uri, hasParamWithValue("code_challenge_method", "S256"));
assertThat(uri, hasParamWithValue("response_type", "code"));
}
// Starting with Custom Tabs options: the AuthenticationActivity intent must carry the
// authorize URI, the use-browser flag set to true, and the provided CustomTabsOptions,
// while WebView-only extras (connection name, fullscreen) stay absent.
@SuppressWarnings("deprecation")
@Test
public void shouldStartLoginWithBrowserCustomTabsOptions() throws Exception {
CustomTabsOptions options = mock(CustomTabsOptions.class);
WebAuthProvider.init(account)
.withCustomTabsOptions(options)
.useCodeGrant(false)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Intent intent = intentCaptor.getValue();
assertThat(intent, is(notNullValue()));
assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
assertThat(intent.getData(), is(nullValue()));
Bundle extras = intentCaptor.getValue().getExtras();
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat((CustomTabsOptions) extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(options));
}
// Browser flow without explicit Custom Tabs options: the CT_OPTIONS extra key exists
// but holds null, and the use-browser flag is true.
@SuppressWarnings("deprecation")
@Test
public void shouldStartLoginWithBrowser() throws Exception {
WebAuthProvider.init(account)
.useBrowser(true)
.useCodeGrant(false)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Intent intent = intentCaptor.getValue();
assertThat(intent, is(notNullValue()));
assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
assertThat(intent.getData(), is(nullValue()));
Bundle extras = intentCaptor.getValue().getExtras();
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(nullValue()));
}
// WebView flow (useBrowser(false)) is launched via startActivityForResult; with no
// explicit connection the CONNECTION_NAME extra is present but null, fullscreen is off,
// and Custom Tabs options are not attached.
@SuppressWarnings("deprecation")
@Test
public void shouldStartLoginWithWebViewAndDefaultConnection() throws Exception {
WebAuthProvider.init(account)
.useBrowser(false)
.useCodeGrant(false)
.useFullscreen(false)
.start(activity, callback, REQUEST_CODE);
verify(activity).startActivityForResult(intentCaptor.capture(), any(Integer.class));
Intent intent = intentCaptor.getValue();
assertThat(intent, is(notNullValue()));
assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
assertThat(intent.getData(), is(nullValue()));
Bundle extras = intentCaptor.getValue().getExtras();
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(true));
assertThat(extras.getString(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(nullValue()));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(false));
}
// WebView flow with an explicit connection and fullscreen enabled: both values are
// forwarded through the intent extras.
@SuppressWarnings("deprecation")
@Test
public void shouldStartLoginWithWebViewAndCustomConnection() throws Exception {
WebAuthProvider.init(account)
.useBrowser(false)
.withConnection("my-connection")
.useCodeGrant(false)
.useFullscreen(true)
.start(activity, callback);
verify(activity).startActivityForResult(intentCaptor.capture(), any(Integer.class));
Intent intent = intentCaptor.getValue();
assertThat(intent, is(notNullValue()));
assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
assertThat(intent.getData(), is(nullValue()));
Bundle extras = intent.getExtras();
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(true));
assertThat(extras.getString(AuthenticationActivity.EXTRA_CONNECTION_NAME), is("my-connection"));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(true));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(false));
}
// Resume (request-code variant) of an id_token flow: capture the state and nonce the
// SDK generated, echo them back in a forged callback intent carrying a JWT with that
// nonce, and expect a success callback.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeLoginWithRequestCodeWithResponseTypeIdToken() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.start(activity, callback, REQUEST_CODE);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
String sentNonce = uri.getQueryParameter(KEY_NONCE);
assertThat(sentState, is(not(isEmptyOrNullString())));
assertThat(sentNonce, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash(customNonceJWT(sentNonce), null, null, null, null, sentState, null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onSuccess(any(Credentials.class));
}
// Same scenario as above but resumed through the intent-only resume(Intent) overload.
@Test
public void shouldResumeLoginWithIntentWithResponseTypeIdToken() throws Exception {
WebAuthProvider.init(account)
.withResponseType(ResponseType.ID_TOKEN)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
String sentNonce = uri.getQueryParameter(KEY_NONCE);
assertThat(sentState, is(not(isEmptyOrNullString())));
assertThat(sentNonce, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash(customNonceJWT(sentNonce), null, null, null, null, sentState, null, null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onSuccess(any(Credentials.class));
}
// resume(...) must accept the SDK's internal default request code (110) when the flow
// was started without an explicit one. The mocked PKCE short-circuits the token
// exchange by invoking the callback directly.
@SuppressWarnings("deprecation")
@Test
public void shouldStartLoginWithValidRequestCode() throws Exception {
final Credentials credentials = Mockito.mock(Credentials.class);
PKCE pkce = Mockito.mock(PKCE.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callback.onSuccess(credentials);
return null;
}
}).when(pkce).getToken(any(String.class), eq(callback));
WebAuthProvider.init(account)
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
// Local variable, so lowerCamelCase (UPPER_SNAKE_CASE is reserved for constants).
// 110 mirrors the provider's hard-coded default request code.
int defaultRequestCode = 110;
assertTrue(WebAuthProvider.resume(defaultRequestCode, Activity.RESULT_OK, intent));
}
// Code-grant resume via resume(Intent): the PKCE mock answers the token exchange with
// fixed "code*" credentials, which must be delivered to the caller unchanged — the
// values parsed from the callback URL ("url*") are discarded in favor of the exchange
// result.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeLoginWithIntentWithCodeGrant() throws Exception {
Date expiresAt = new Date();
final Credentials codeCredentials = new Credentials("codeId", "codeAccess", "codeType", "codeRefresh", expiresAt, "codeScope");
PKCE pkce = Mockito.mock(PKCE.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callbackCaptor.getValue().onSuccess(codeCredentials);
return null;
}
}).when(pkce).getToken(any(String.class), callbackCaptor.capture());
WebAuthProvider.init(account)
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getIdToken(), is("codeId"));
assertThat(credentialsCaptor.getValue().getAccessToken(), is("codeAccess"));
assertThat(credentialsCaptor.getValue().getRefreshToken(), is("codeRefresh"));
assertThat(credentialsCaptor.getValue().getType(), is("codeType"));
assertThat(credentialsCaptor.getValue().getExpiresAt(), is(expiresAt));
assertThat(credentialsCaptor.getValue().getScope(), is("codeScope"));
}
// Same code-grant scenario resumed through the (requestCode, resultCode, intent)
// overload.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeLoginWithRequestCodeWithCodeGrant() throws Exception {
Date expiresAt = new Date();
final Credentials codeCredentials = new Credentials("codeId", "codeAccess", "codeType", "codeRefresh", expiresAt, "codeScope");
PKCE pkce = Mockito.mock(PKCE.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callbackCaptor.getValue().onSuccess(codeCredentials);
return null;
}
}).when(pkce).getToken(any(String.class), callbackCaptor.capture());
WebAuthProvider.init(account)
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback, REQUEST_CODE);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getIdToken(), is("codeId"));
assertThat(credentialsCaptor.getValue().getAccessToken(), is("codeAccess"));
assertThat(credentialsCaptor.getValue().getRefreshToken(), is("codeRefresh"));
assertThat(credentialsCaptor.getValue().getType(), is("codeType"));
assertThat(credentialsCaptor.getValue().getExpiresAt(), is(expiresAt));
assertThat(credentialsCaptor.getValue().getScope(), is("codeScope"));
}
// Implicit-grant resume via resume(Intent): with no code exchange, the credentials
// parsed from the callback URL fragment are passed straight through to the callback.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeLoginWithIntentWithImplicitGrant() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getIdToken(), is("urlId"));
assertThat(credentialsCaptor.getValue().getAccessToken(), is("urlAccess"));
assertThat(credentialsCaptor.getValue().getRefreshToken(), is("urlRefresh"));
assertThat(credentialsCaptor.getValue().getType(), is("urlType"));
assertThat(credentialsCaptor.getValue().getExpiresIn(), is(1111L));
}
// Same implicit-grant scenario resumed through the request-code overload.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeLoginWithRequestCodeWithImplicitGrant() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getIdToken(), is("urlId"));
assertThat(credentialsCaptor.getValue().getAccessToken(), is("urlAccess"));
assertThat(credentialsCaptor.getValue().getRefreshToken(), is("urlRefresh"));
assertThat(credentialsCaptor.getValue().getType(), is("urlType"));
assertThat(credentialsCaptor.getValue().getExpiresIn(), is(1111L));
}
// User-cancelled flow (RESULT_CANCELED): the callback receives an
// "a0.authentication_canceled" failure rather than a success or crash.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeLoginWithRequestCodeWhenResultCancelled() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(null);
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_CANCELED, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.authentication_canceled"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The user closed the browser app and the authentication was canceled."));
}
// Cancellation detected from a data-less intent in the resume(Intent) overload.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeLoginWithIntentWhenResultCancelled() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(null);
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.authentication_canceled"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The user closed the browser app and the authentication was canceled."));
}
// The expiresAt Date is derived from the injected clock: fixed "now" (CURRENT_TIME_MS)
// plus expires_in seconds converted to milliseconds.
@Test
public void shouldCalculateExpiresAtDateOnResumeLogin() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
OAuthManager managerInstance = (OAuthManager) WebAuthProvider.getManagerInstance();
managerInstance.setCurrentTimeInMillis(CURRENT_TIME_MS);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
// expires_in is in seconds; expiresAt = now + expires_in * 1000 ms.
long expirationTime = CURRENT_TIME_MS + 1111L * 1000;
assertThat(credentialsCaptor.getValue().getExpiresAt(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getExpiresAt().getTime(), is(expirationTime));
}
// When the PKCE code exchange fails with a Dialog, that same Dialog instance is
// re-thrown to the original callback.
@SuppressWarnings("deprecation")
@Test
public void shouldReThrowAnyFailedCodeExchangeDialogOnLogin() throws Exception {
final Dialog dialog = Mockito.mock(Dialog.class);
PKCE pkce = Mockito.mock(PKCE.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callbackCaptor.getValue().onFailure(dialog);
return null;
}
}).when(pkce).getToken(any(String.class), callbackCaptor.capture());
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(dialog);
}
// When the PKCE code exchange fails with an AuthenticationException, that same
// exception instance is re-thrown to the original callback.
@SuppressWarnings("deprecation")
@Test
public void shouldReThrowAnyFailedCodeExchangeExceptionOnLogin() throws Exception {
final AuthenticationException exception = Mockito.mock(AuthenticationException.class);
PKCE pkce = Mockito.mock(PKCE.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callbackCaptor.getValue().onFailure(exception);
return null;
}
}).when(pkce).getToken(any(String.class), callbackCaptor.capture());
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(exception);
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithIntentWithAccessDenied() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "access_denied", null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Permissions were not granted. Try again."));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithRequestCodeWithAccessDenied() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "access_denied", null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Permissions were not granted. Try again."));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithIntentWithRuleError() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "unauthorized", "Custom Rule Error"));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("unauthorized"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Custom Rule Error"));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithRequestCodeWithRuleError() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "unauthorized", "Custom Rule Error"));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("unauthorized"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Custom Rule Error"));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithIntentWithConfigurationInvalid() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "some other error", null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.invalid_configuration"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The application isn't configured properly for the social connection. Please check your Auth0's application configuration"));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithRequestCodeWithConfigurationInvalid() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "some other error", null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.invalid_configuration"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The application isn't configured properly for the social connection. Please check your Auth0's application configuration"));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithIntentWithLoginRequired() throws Exception {
    // A "login_required" error in the callback hash is surfaced verbatim to the callback.
    WebAuthProvider.init(account)
            .withState("1234567890")
            .useCodeGrant(false)
            .start(activity, callback);
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "login_required", "Login Required"));
    assertTrue(WebAuthProvider.resume(intent));
    verify(callback).onFailure(authExceptionCaptor.capture());
    assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(authExceptionCaptor.getValue().getCode(), is("login_required"));
    assertThat(authExceptionCaptor.getValue().getDescription(), is("Login Required"));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithRequestCodeWithLoginRequired() throws Exception {
    // Same "login_required" propagation, via the deprecated requestCode resume overload.
    WebAuthProvider.init(account)
            .withState("1234567890")
            .useCodeGrant(false)
            .start(activity, callback, REQUEST_CODE);
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "login_required", "Login Required"));
    assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
    verify(callback).onFailure(authExceptionCaptor.capture());
    assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(authExceptionCaptor.getValue().getCode(), is("login_required"));
    assertThat(authExceptionCaptor.getValue().getDescription(), is("Login Required"));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithIntentWithInvalidState() throws Exception {
    // Sent state ("abcdefghijk") differs from the state returned in the callback
    // ("1234567890"), so resume must fail with access_denied.
    WebAuthProvider.init(account)
            .withState("abcdefghijk")
            .useCodeGrant(false)
            .start(activity, callback);
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
    assertTrue(WebAuthProvider.resume(intent));
    verify(callback).onFailure(authExceptionCaptor.capture());
    assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
    assertThat(authExceptionCaptor.getValue().getDescription(), is("The received state is invalid. Try again."));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithRequestCodeWithInvalidState() throws Exception {
    // State mismatch must also be detected through the deprecated requestCode overload.
    WebAuthProvider.init(account)
            .withState("abcdefghijk")
            .useCodeGrant(false)
            .start(activity, callback, REQUEST_CODE);
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
    assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
    verify(callback).onFailure(authExceptionCaptor.capture());
    assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
    assertThat(authExceptionCaptor.getValue().getDescription(), is("The received state is invalid. Try again."));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithIntentWithInvalidNonce() throws Exception {
    // The hardcoded JWT's payload carries nonce "1234567890", which does not match
    // the requested nonce "0987654321" — resume must fail with access_denied.
    WebAuthProvider.init(account)
            .withState("state")
            .withNonce("0987654321")
            .withResponseType(ResponseType.ID_TOKEN)
            .start(activity, callback);
    Intent intent = createAuthIntent(createHash("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJub25jZSI6IjEyMzQ1Njc4OTAifQ.oUb6xFIEPJQrFbel_Js4SaOwpFfM_kxHxI7xDOHgghk", null, null, null, null, "state", null, null));
    assertTrue(WebAuthProvider.resume(intent));
    verify(callback).onFailure(authExceptionCaptor.capture());
    assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
    assertThat(authExceptionCaptor.getValue().getDescription(), is("The received nonce is invalid. Try again."));
}
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeLoginWithRequestCodeWithInvalidNonce() throws Exception {
    // Nonce mismatch must also be detected through the deprecated requestCode overload.
    WebAuthProvider.init(account)
            .withState("state")
            .withNonce("0987654321")
            .withResponseType(ResponseType.ID_TOKEN)
            .start(activity, callback, REQUEST_CODE);
    Intent intent = createAuthIntent(createHash("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJub25jZSI6IjEyMzQ1Njc4OTAifQ.oUb6xFIEPJQrFbel_Js4SaOwpFfM_kxHxI7xDOHgghk", null, null, null, null, "state", null, null));
    assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
    verify(callback).onFailure(authExceptionCaptor.capture());
    assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
    assertThat(authExceptionCaptor.getValue().getDescription(), is("The received nonce is invalid. Try again."));
}
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeLoginWithUnexpectedRequestCode() throws Exception {
    // resume() with a request code other than the one used in start() must be
    // rejected without ever touching the callback.
    verifyNoMoreInteractions(callback);
    WebAuthProvider.init(account)
            .useCodeGrant(false)
            .start(activity, callback);
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
    assertFalse(WebAuthProvider.resume(999, Activity.RESULT_OK, intent));
}
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeLoginWithResultNotOK() throws Exception {
    // A result code other than RESULT_OK must be rejected without invoking the callback.
    verifyNoMoreInteractions(callback);
    WebAuthProvider.init(account)
            .useCodeGrant(false)
            .start(activity, callback, REQUEST_CODE);
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
    assertFalse(WebAuthProvider.resume(REQUEST_CODE, 999, intent));
}
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeLoginWithIntentWithEmptyUriValues() throws Exception {
    // createAuthIntent("") yields a callback URI with no fragment values; a pending
    // login resume must reject it without invoking the callback.
    verifyNoMoreInteractions(callback);
    WebAuthProvider.init(account)
            .withState("abcdefghijk")
            .useCodeGrant(false)
            .start(activity, callback);
    Intent intent = createAuthIntent("");
    assertFalse(WebAuthProvider.resume(intent));
}
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeLoginWithRequestCodeWithEmptyUriValues() throws Exception {
    // Empty callback URI must also be rejected through the deprecated requestCode overload.
    verifyNoMoreInteractions(callback);
    WebAuthProvider.init(account)
            .withState("abcdefghijk")
            .useCodeGrant(false)
            .start(activity, callback, REQUEST_CODE);
    Intent intent = createAuthIntent("");
    assertFalse(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
}
@Test
public void shouldFailToResumeLoginWithIntentWithoutFirstInitProvider() throws Exception {
    // resume() without a prior init()/start() has no manager instance and must return false.
    Intent intent = createAuthIntent("");
    assertFalse(WebAuthProvider.resume(intent));
}
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeLoginWithRequestCodeWithoutFirstInitProvider() throws Exception {
    // Same no-manager rejection through the deprecated requestCode overload.
    Intent intent = createAuthIntent("");
    assertFalse(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
}
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeLoginWithIntentWithNullIntent() throws Exception {
    // Renamed from shouldResumeLoginWithIntentWithNullIntent: the test asserts that
    // resuming with a null Intent FAILS, so the name now follows the
    // shouldFailToResume... convention used by every sibling test (including the
    // request-code variant of this exact scenario).
    WebAuthProvider.init(account)
            .withState("abcdefghijk")
            .useCodeGrant(false)
            .start(activity, callback);
    assertFalse(WebAuthProvider.resume(null));
}
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeLoginWithRequestCodeWithNullIntent() throws Exception {
    // A null data Intent must be rejected by the deprecated requestCode resume overload.
    WebAuthProvider.init(account)
            .withState("abcdefghijk")
            .useCodeGrant(false)
            .start(activity, callback, REQUEST_CODE);
    assertFalse(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, null));
}
@Test
public void shouldClearInstanceAfterSuccessLoginWithIntent() throws Exception {
    // The singleton manager instance must be released once the login flow resumes
    // successfully, so stale state can't leak into a later flow.
    WebAuthProvider.init(account)
            .start(activity, callback);
    assertThat(WebAuthProvider.getManagerInstance(), is(notNullValue()));
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
    assertTrue(WebAuthProvider.resume(intent));
    assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
@SuppressWarnings("deprecation")
@Test
public void shouldClearInstanceAfterSuccessLoginWithRequestCode() throws Exception {
    // Same manager-instance cleanup, via the deprecated requestCode resume overload.
    WebAuthProvider.init(account)
            .start(activity, callback, REQUEST_CODE);
    assertThat(WebAuthProvider.getManagerInstance(), is(notNullValue()));
    Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
    assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
    assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
@Test
public void shouldFailToStartLoginWhenNoBrowserAppIsInstalled() throws Exception {
    // With no browser app resolvable, a browser-based login must fail immediately
    // with a0.browser_not_available and leave no manager instance behind.
    prepareBrowserApp(false, null);
    WebAuthProvider.init(account)
            .useBrowser(true)
            .start(activity, callback);
    verify(callback).onFailure(authExceptionCaptor.capture());
    assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(authExceptionCaptor.getValue().getCode(), is("a0.browser_not_available"));
    assertThat(authExceptionCaptor.getValue().getDescription(), is("No Browser application installed to perform web authentication."));
    assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotFailToStartLoginWithWebviewWhenNoBrowserAppIsInstalled() throws Exception {
    // The (deprecated) webview mode does not require a browser app: the
    // AuthenticationActivity must still be launched and no failure reported.
    prepareBrowserApp(false, null);
    WebAuthProvider.init(account)
            .useBrowser(false)
            .start(activity, callback, REQUEST_CODE);
    verify(activity).startActivityForResult(intentCaptor.capture(), any(Integer.class));
    Intent intent = intentCaptor.getValue();
    assertThat(intent, is(notNullValue()));
    assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
    assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
    assertThat(intent.getData(), is(nullValue()));
    verify(callback, never()).onFailure(any(AuthenticationException.class));
}
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
//** LOG OUT FEATURE **//
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
@Test
public void shouldInitLogoutWithAccount() throws Exception {
    // Starting a logout flow must create the singleton manager instance.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    assertNotNull(WebAuthProvider.getManagerInstance());
}
//scheme
@Test
public void shouldHaveDefaultSchemeOnLogout() throws Exception {
    // When no scheme is set, the logout returnTo URI must default to https.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
    assertThat(uri, is(notNullValue()));
    assertThat(uri, hasParamWithName("returnTo"));
    Uri returnToUri = Uri.parse(uri.getQueryParameter("returnTo"));
    assertThat(returnToUri, hasScheme("https"));
}
@Test
public void shouldSetSchemeOnLogout() throws Exception {
    // withScheme() must override the returnTo URI scheme of the logout URL.
    WebAuthProvider.logout(account)
            .withScheme("myapp")
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
    assertThat(uri, is(notNullValue()));
    assertThat(uri, hasParamWithName("returnTo"));
    Uri returnToUri = Uri.parse(uri.getQueryParameter("returnTo"));
    assertThat(returnToUri, hasScheme("myapp"));
}
// client id
@Test
public void shouldAlwaysSetClientIdOnLogout() throws Exception {
    // The client_id from the Auth0 account must always be present in the logout URL.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
    assertThat(uri, is(notNullValue()));
    assertThat(uri, hasParamWithValue("client_id", "clientId"));
}
// auth0 related
@Test
public void shouldHaveTelemetryInfoOnLogout() throws Exception {
    // The logout URL must carry a non-empty auth0Client telemetry parameter.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
    assertThat(uri, is(notNullValue()));
    assertThat(uri, hasParamWithValue(is("auth0Client"), not(isEmptyOrNullString())));
}
@Test
public void shouldHaveReturnToUriOnLogout() throws Exception {
    // The returnTo parameter must be the app's callback URL built from the account
    // domain and the application package name.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
    assertThat(uri, is(notNullValue()));
    assertThat(uri.getQueryParameter("returnTo"), is("https://domain/android/com.auth0.android.auth0.test/callback"));
}
// Launch log out
@SuppressWarnings("deprecation")
@Test
public void shouldStartLogout() throws Exception {
    // Starting a logout must launch AuthenticationActivity with CLEAR_TOP, no data
    // URI, and the expected set of extras (browser mode on, no custom tabs options).
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Intent intent = intentCaptor.getValue();
    assertThat(intent, is(notNullValue()));
    assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
    assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
    assertThat(intent.getData(), is(nullValue()));
    Bundle extras = intentCaptor.getValue().getExtras();
    assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
    assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
    // The CT_OPTIONS key is present but holds null when no options were provided.
    assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(nullValue()));
}
@Test
public void shouldStartLogoutWithCustomTabsOptions() throws Exception {
    // When CustomTabsOptions are supplied, they must be forwarded to
    // AuthenticationActivity via the EXTRA_CT_OPTIONS extra.
    CustomTabsOptions options = mock(CustomTabsOptions.class);
    WebAuthProvider.logout(account)
            .withCustomTabsOptions(options)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Intent intent = intentCaptor.getValue();
    assertThat(intent, is(notNullValue()));
    assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
    assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
    assertThat(intent.getData(), is(nullValue()));
    Bundle extras = intentCaptor.getValue().getExtras();
    assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
    assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
    assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
    assertThat((CustomTabsOptions) extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(options));
}
@Test
public void shouldFailToStartLogoutWhenNoBrowserAppIsInstalled() throws Exception {
    // With no browser app resolvable, logout must fail with an Auth0Exception whose
    // cause is an ActivityNotFoundException, and release the manager instance.
    prepareBrowserApp(false, null);
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(voidCallback).onFailure(auth0ExceptionCaptor.capture());
    assertThat(auth0ExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(auth0ExceptionCaptor.getValue().getMessage(), is("Cannot perform web log out"));
    Throwable cause = auth0ExceptionCaptor.getValue().getCause();
    assertThat(cause, is(CoreMatchers.<Throwable>instanceOf(ActivityNotFoundException.class)));
    assertThat(cause.getMessage(), is("No Browser application installed."));
    assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
@Test
public void shouldResumeLogoutSuccessfullyWithIntent() throws Exception {
    // A redirect back with an (empty-fragment) callback URI completes the logout
    // and reports success on the void callback.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
    assertThat(uri, is(notNullValue()));
    Intent intent = createAuthIntent("");
    assertTrue(WebAuthProvider.resume(intent));
    verify(voidCallback).onSuccess(any(Void.class));
}
@Test
public void shouldResumeLogoutFailingWithIntent() throws Exception {
    // An Intent without data (user closed the browser) must fail the logout with the
    // dedicated cancellation message and release the manager instance.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    verify(activity).startActivity(intentCaptor.capture());
    Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
    assertThat(uri, is(notNullValue()));
    //null data translates to result canceled
    Intent intent = createAuthIntent(null);
    assertTrue(WebAuthProvider.resume(intent));
    verify(voidCallback).onFailure(auth0ExceptionCaptor.capture());
    assertThat(auth0ExceptionCaptor.getValue(), is(notNullValue()));
    assertThat(auth0ExceptionCaptor.getValue().getMessage(), is("The user closed the browser app so the logout was cancelled."));
    assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
@Test
public void shouldClearLogoutManagerInstanceAfterSuccessfulLogout() throws Exception {
    // A successful logout resume must release the singleton manager instance.
    WebAuthProvider.logout(account)
            .start(activity, voidCallback);
    assertThat(WebAuthProvider.getManagerInstance(), is(notNullValue()));
    Intent intent = createAuthIntent("");
    assertTrue(WebAuthProvider.resume(intent));
    assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
//**Tests for Utilities**//
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
@Test
public void shouldHaveBrowserAppInstalled() {
    // hasBrowserAppInstalled() must report true when the PackageManager resolves a
    // browser, and must probe with an ACTION_VIEW Intent carrying a valid URL.
    ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
    prepareBrowserApp(true, intentCaptor);
    boolean hasBrowserApp = WebAuthProvider.hasBrowserAppInstalled(activity.getPackageManager());
    MatcherAssert.assertThat(hasBrowserApp, Is.is(true));
    MatcherAssert.assertThat(intentCaptor.getValue(), Is.is(IntentMatchers.hasAction(Intent.ACTION_VIEW)));
    MatcherAssert.assertThat(URLUtil.isValidUrl(intentCaptor.getValue().getDataString()), Is.is(true));
}
@Test
public void shouldNotHaveBrowserAppInstalled() {
    // hasBrowserAppInstalled() must report false when nothing resolves, while still
    // probing with a well-formed ACTION_VIEW Intent.
    ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
    prepareBrowserApp(false, intentCaptor);
    boolean hasBrowserApp = WebAuthProvider.hasBrowserAppInstalled(activity.getPackageManager());
    MatcherAssert.assertThat(hasBrowserApp, Is.is(false));
    MatcherAssert.assertThat(intentCaptor.getValue(), Is.is(IntentMatchers.hasAction(Intent.ACTION_VIEW)));
    MatcherAssert.assertThat(URLUtil.isValidUrl(intentCaptor.getValue().getDataString()), Is.is(true));
}
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
//** Helpers Functions**//
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
private Intent createAuthIntent(@Nullable String hash) {
    // Emulates the redirect Intent delivered back to the app after web auth.
    // A null hash produces an Intent without data, which the provider treats as a
    // canceled flow; any other value is appended as the callback URI fragment.
    Intent intent = new Intent();
    if (hash != null) {
        intent.setData(Uri.parse("https://domain.auth0.com/android/package/callback" + hash));
    }
    return intent;
}
private void prepareBrowserApp(boolean isAppInstalled, @Nullable ArgumentCaptor<Intent> intentCaptor) {
    // Stubs the Activity's PackageManager so the provider believes a browser app is
    // (or is not) installed. When a captor is supplied it records the probe Intent
    // passed to resolveActivity(); otherwise any Intent is matched.
    PackageManager pm = mock(PackageManager.class);
    ResolveInfo info = null;
    if (isAppInstalled) {
        // Minimal ResolveInfo graph: the provider only needs activityInfo populated.
        info = mock(ResolveInfo.class);
        ApplicationInfo appInfo = mock(ApplicationInfo.class);
        appInfo.packageName = "com.auth0.test";
        ActivityInfo actInfo = mock(ActivityInfo.class);
        actInfo.applicationInfo = appInfo;
        actInfo.name = "Auth0 Browser";
        info.activityInfo = actInfo;
    }
    // capture() vs any() must be chosen inline: both are Mockito argument matchers
    // and must be evaluated inside the when() call.
    when(pm.resolveActivity(intentCaptor != null ? intentCaptor.capture() : any(Intent.class), eq(PackageManager.MATCH_DEFAULT_ONLY))).thenReturn(info);
    when(activity.getPackageManager()).thenReturn(pm);
}
private String createHash(@Nullable String idToken, @Nullable String accessToken, @Nullable String refreshToken, @Nullable String tokenType, @Nullable Long expiresIn, @Nullable String state, @Nullable String error, @Nullable String errorDescription) {
    // Builds the URI fragment ("#key=value&key=value...") that Auth0 appends to the
    // callback URL after web authentication. Null arguments are omitted. Returns ""
    // when every argument is null so createAuthIntent() can emulate a callback
    // without a fragment. NOTE: values are deliberately not URL-encoded, matching
    // what the tests expect the provider to parse.
    StringBuilder hash = new StringBuilder("#");
    appendHashParam(hash, "access_token", accessToken);
    appendHashParam(hash, "id_token", idToken);
    appendHashParam(hash, "refresh_token", refreshToken);
    appendHashParam(hash, "token_type", tokenType);
    appendHashParam(hash, "expires_in", expiresIn == null ? null : String.valueOf(expiresIn));
    appendHashParam(hash, "state", state);
    appendHashParam(hash, "error", error);
    appendHashParam(hash, "error_description", errorDescription);
    // Drop the trailing '&' left by the last appended parameter, if any.
    if (hash.charAt(hash.length() - 1) == '&') {
        hash.setLength(hash.length() - 1);
    }
    return hash.length() == 1 ? "" : hash.toString();
}

// Appends "name=value&" to the fragment being built; skips null values entirely.
private static void appendHashParam(StringBuilder hash, String name, @Nullable String value) {
    if (value != null) {
        hash.append(name).append('=').append(value).append('&');
    }
}
private String customNonceJWT(@NonNull String nonce) {
    // Assembles a JWT-shaped string ("header.payload.signature") whose payload
    // carries only the given nonce. The signature part is a fixed placeholder, so
    // the token is not verifiable — only its claims are inspected by the provider.
    final String header = encodeString("{}");
    final String payload = encodeString("{\"nonce\":\"" + nonce + "\"}");
    return header + "." + payload + "." + "sign";
}
private String encodeString(String source) {
    // Base64url-encodes the string without padding or line wraps, the encoding used
    // for the header/payload parts of a JWT.
    byte[] bytes = Base64.encode(source.getBytes(), Base64.URL_SAFE | Base64.NO_WRAP | Base64.NO_PADDING);
    String res = "";
    try {
        res = new String(bytes, "UTF-8");
    } catch (UnsupportedEncodingException ignored) {
        // UTF-8 is always supported on Android, so this branch is unreachable.
    }
    return res;
}
} | auth0/src/test/java/com/auth0/android/provider/WebAuthProviderTest.java | package com.auth0.android.provider;
import android.app.Activity;
import android.app.Dialog;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.test.espresso.intent.matcher.IntentMatchers;
import android.util.Base64;
import android.webkit.URLUtil;
import com.auth0.android.Auth0;
import com.auth0.android.Auth0Exception;
import com.auth0.android.authentication.AuthenticationException;
import com.auth0.android.result.Credentials;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.hamcrest.core.Is;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import java.io.UnsupportedEncodingException;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static android.support.test.espresso.intent.matcher.IntentMatchers.hasComponent;
import static android.support.test.espresso.intent.matcher.IntentMatchers.hasFlag;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasHost;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasParamWithName;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasParamWithValue;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasPath;
import static android.support.test.espresso.intent.matcher.UriMatchers.hasScheme;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
@RunWith(RobolectricTestRunner.class)
@Config(sdk = 18)
public class WebAuthProviderTest {
private static final int REQUEST_CODE = 11;
private static final String KEY_STATE = "state";
private static final String KEY_NONCE = "nonce";
private static final long CURRENT_TIME_MS = 1234567890000L;
@Mock
private AuthCallback callback;
@Mock
private VoidCallback voidCallback;
private Activity activity;
private Auth0 account;
@Captor
private ArgumentCaptor<Auth0Exception> auth0ExceptionCaptor;
@Captor
private ArgumentCaptor<AuthenticationException> authExceptionCaptor;
@Captor
private ArgumentCaptor<Intent> intentCaptor;
@Captor
private ArgumentCaptor<AuthCallback> callbackCaptor;
@Captor
private ArgumentCaptor<VoidCallback> voidCallbackCaptor;
@Before
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
activity = spy(Robolectric.buildActivity(Activity.class).get());
account = new Auth0("clientId", "domain");
//Next line is needed to avoid CustomTabService from being bound to Test environment
//noinspection WrongConstant
doReturn(false).when(activity).bindService(any(Intent.class), any(ServiceConnection.class), anyInt());
//Next line is needed to tell a Browser app is installed
prepareBrowserApp(true, null);
}
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
//** LOG IN FEATURE **//
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
@SuppressWarnings("deprecation")
@Test
public void shouldLoginWithAccount() throws Exception {
WebAuthProvider.login(account)
.start(activity, callback, REQUEST_CODE);
assertNotNull(WebAuthProvider.getManagerInstance());
}
@SuppressWarnings("deprecation")
@Test
public void shouldInitWithAccount() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback, REQUEST_CODE);
assertNotNull(WebAuthProvider.getManagerInstance());
}
@Test
public void shouldInitWithContext() throws Exception {
Context context = Mockito.mock(Context.class);
Resources resources = Mockito.mock(Resources.class);
when(context.getResources()).thenReturn(resources);
when(resources.getIdentifier(eq("com_auth0_client_id"), eq("string"), anyString())).thenReturn(222);
when(resources.getIdentifier(eq("com_auth0_domain"), eq("string"), anyString())).thenReturn(333);
when(context.getString(eq(222))).thenReturn("clientId");
when(context.getString(eq(333))).thenReturn("domain");
WebAuthProvider.init(context)
.start(activity, callback);
assertNotNull(WebAuthProvider.getManagerInstance());
}
@SuppressWarnings("deprecation")
@Test
public void shouldNotResumeWithRequestCodeWhenNotInit() throws Exception {
Intent intentMock = Mockito.mock(Intent.class);
assertFalse(WebAuthProvider.resume(0, 0, intentMock));
}
@Test
public void shouldNotResumeWithIntentWhenNotInit() throws Exception {
Intent intentMock = Mockito.mock(Intent.class);
assertFalse(WebAuthProvider.resume(intentMock));
}
//scheme
@Test
public void shouldHaveDefaultScheme() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithName("redirect_uri"));
Uri redirectUri = Uri.parse(uri.getQueryParameter("redirect_uri"));
assertThat(redirectUri, hasScheme("https"));
}
@Test
public void shouldSetScheme() throws Exception {
WebAuthProvider.init(account)
.withScheme("myapp")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithName("redirect_uri"));
Uri redirectUri = Uri.parse(uri.getQueryParameter("redirect_uri"));
assertThat(redirectUri, hasScheme("myapp"));
}
//connection
@Test
public void shouldNotHaveDefaultConnection() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("connection")));
}
@Test
public void shouldSetConnectionFromParameters() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection", (Object) "my-connection");
WebAuthProvider.init(account)
.withConnection("some-connection")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "my-connection"));
}
@Test
public void shouldSetConnectionFromSetter() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection", (Object) "my-connection");
WebAuthProvider.init(account)
.withParameters(parameters)
.withConnection("some-connection")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "some-connection"));
}
@Test
public void shouldNotOverrideConnectionValueWithDefaultConnection() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("connection", (Object) "my-connection");
WebAuthProvider.init(account)
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "my-connection"));
}
@Test
public void shouldSetConnection() throws Exception {
WebAuthProvider.init(account)
.withConnection("some-connection")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("connection", "some-connection"));
}
//audience
@Test
public void shouldNotHaveDefaultAudience() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, not(hasParamWithName("audience")));
}
@Test
public void shouldSetAudienceFromParameters() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("audience", (Object) "https://mydomain.auth0.com/myapi");
WebAuthProvider.init(account)
.withAudience("https://google.com/apis")
.withParameters(parameters)
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("audience", "https://mydomain.auth0.com/myapi"));
}
@Test
public void shouldSetAudienceFromSetter() throws Exception {
Map<String, Object> parameters = Collections.singletonMap("audience", (Object) "https://mydomain.auth0.com/myapi");
WebAuthProvider.init(account)
.withParameters(parameters)
.withAudience("https://google.com/apis")
.start(activity, callback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("audience", "https://google.com/apis"));
}
    // An "audience" given only via withParameters must be kept as-is (no default clobbers it).
    @Test
    public void shouldNotOverrideAudienceValueWithDefaultAudience() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("audience", (Object) "https://mydomain.auth0.com/myapi");
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("audience", "https://mydomain.auth0.com/myapi"));
    }
    // withAudience alone should set the "audience" query parameter.
    @Test
    public void shouldSetAudience() throws Exception {
        WebAuthProvider.init(account)
                .withAudience("https://google.com/apis")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("audience", "https://google.com/apis"));
    }
    // ---- "scope" parameter ----
    // The default "scope" should be "openid" when none is supplied.
    @Test
    public void shouldHaveDefaultScope() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("scope", "openid"));
    }
    // When withParameters is called AFTER withScope, the map value wins for "scope".
    @Test
    public void shouldSetScopeFromParameters() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("scope", (Object) "openid email contacts");
        WebAuthProvider.init(account)
                .withScope("profile super_scope")
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("scope", "openid email contacts"));
    }
    // When withScope is called AFTER withParameters, the setter value wins for "scope".
    @Test
    public void shouldSetScopeFromSetter() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("scope", (Object) "openid email contacts");
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .withScope("profile super_scope")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("scope", "profile super_scope"));
    }
    // A "scope" given only via withParameters must not be replaced by the default "openid".
    @Test
    public void shouldNotOverrideScopeValueWithDefaultScope() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("scope", (Object) "openid email contacts");
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("scope", "openid email contacts"));
    }
    // withScope alone should set the "scope" query parameter verbatim.
    @Test
    public void shouldSetScope() throws Exception {
        WebAuthProvider.init(account)
                .withScope("profile super_scope")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("scope", "profile super_scope"));
    }
    // ---- "connection_scope" parameter ----
    // No "connection_scope" parameter should be present unless the caller sets one.
    @Test
    public void shouldNotHaveDefaultConnectionScope() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, not(hasParamWithName("connection_scope")));
    }
    // When withParameters is called AFTER withConnectionScope, the map value wins for "connection_scope".
    @Test
    public void shouldSetConnectionScopeFromParameters() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("connection_scope", (Object) "openid,email,contacts");
        WebAuthProvider.init(account)
                .withConnectionScope("profile", "super_scope")
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("connection_scope", "openid,email,contacts"));
    }
    // When withConnectionScope is called AFTER withParameters, the setter's comma-joined values win.
    @Test
    public void shouldSetConnectionScopeFromSetter() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("connection_scope", (Object) "openid,email,contacts");
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .withConnectionScope("profile", "super_scope")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("connection_scope", "profile,super_scope"));
    }
    // A "connection_scope" given only via withParameters must be kept as-is.
    @Test
    public void shouldNotOverrideConnectionScopeValueWithDefaultConnectionScope() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("connection_scope", (Object) "openid,email,contacts");
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("connection_scope", "openid,email,contacts"));
    }
    // withConnectionScope varargs should be joined with commas into "connection_scope".
    @Test
    public void shouldSetConnectionScope() throws Exception {
        WebAuthProvider.init(account)
                .withConnectionScope("the", "scope", "of", "my", "connection")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("connection_scope", "the,scope,of,my,connection"));
    }
    // ---- "state" parameter ----
    // A non-empty "state" value should always be generated by default.
    @Test
    public void shouldHaveDefaultState() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue(is("state"), not(isEmptyOrNullString())));
    }
    // Passing null to withState should fall back to a generated, non-empty "state".
    @Test
    public void shouldSetNonNullState() throws Exception {
        WebAuthProvider.init(account)
                .withState(null)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue(is("state"), not(isEmptyOrNullString())));
    }
    // When withParameters is called AFTER withState, the map value wins for "state".
    @Test
    public void shouldSetStateFromParameters() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("state", (Object) "1234567890");
        WebAuthProvider.init(account)
                .withState("abcdefg")
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("state", "1234567890"));
    }
    // When withState is called AFTER withParameters, the setter value wins for "state".
    @Test
    public void shouldSetStateFromSetter() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("state", (Object) "1234567890");
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .withState("abcdefg")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("state", "abcdefg"));
    }
    // A "state" given only via withParameters must not be replaced by a generated default.
    @Test
    public void shouldNotOverrideStateValueWithDefaultState() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("state", (Object) "1234567890");
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("state", "1234567890"));
    }
    // withState alone should set the "state" query parameter verbatim.
    @Test
    public void shouldSetState() throws Exception {
        WebAuthProvider.init(account)
                .withState("abcdefg")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("state", "abcdefg"));
    }
    // ---- "nonce" parameter ----
    // No default "nonce" should be added for the CODE response type.
    @Test
    public void shouldNotSetNonceByDefaultIfResponseTypeIsCode() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.CODE)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, not(hasParamWithName("nonce")));
    }
    // No default "nonce" should be added for the TOKEN response type.
    @Test
    public void shouldNotSetNonceByDefaultIfResponseTypeIsToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, not(hasParamWithName("nonce")));
    }
    // A non-empty "nonce" should be generated by default for the ID_TOKEN response type.
    @Test
    public void shouldHaveDefaultNonce() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue(is("nonce"), not(isEmptyOrNullString())));
    }
    // Passing null to withNonce should fall back to a generated, non-empty "nonce" for ID_TOKEN.
    @Test
    public void shouldSetNonNullNonce() throws Exception {
        WebAuthProvider.init(account)
                .withNonce(null)
                .withResponseType(ResponseType.ID_TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue(is("nonce"), not(isEmptyOrNullString())));
    }
    // An explicit user-supplied nonce is honored even for the TOKEN response type.
    @Test
    public void shouldSetUserNonceIfResponseTypeIsToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.TOKEN)
                .withNonce("1234567890")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("nonce", "1234567890"));
    }
    // An explicit user-supplied nonce is honored even for the CODE response type.
    @Test
    public void shouldSetUserNonceIfResponseTypeIsCode() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.CODE)
                .withNonce("1234567890")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("nonce", "1234567890"));
    }
    // When withParameters is called AFTER withNonce, the map value wins for "nonce".
    @Test
    public void shouldSetNonceFromParameters() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("nonce", (Object) "1234567890");
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .withNonce("abcdefg")
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("nonce", "1234567890"));
    }
    // When withNonce is called AFTER withParameters, the setter value wins for "nonce".
    @Test
    public void shouldSetNonceFromSetter() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("nonce", (Object) "1234567890");
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .withParameters(parameters)
                .withNonce("abcdefg")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("nonce", "abcdefg"));
    }
    // A "nonce" given only via withParameters must not be replaced by a generated default.
    @Test
    public void shouldNotOverrideNonceValueWithDefaultNonce() throws Exception {
        Map<String, Object> parameters = Collections.singletonMap("nonce", (Object) "1234567890");
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("nonce", "1234567890"));
    }
    // withNonce alone should set the "nonce" query parameter verbatim.
    @Test
    public void shouldSetNonce() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .withNonce("abcdefg")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("nonce", "abcdefg"));
    }
    // getRandomString(null) should produce a fresh random value on every call.
    @Test
    public void shouldGenerateRandomStringIfDefaultValueMissing() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        String random1 = OAuthManager.getRandomString(null);
        String random2 = OAuthManager.getRandomString(null);
        assertThat(random1, is(notNullValue()));
        assertThat(random2, is(notNullValue()));
        assertThat(random1, is(not(equalTo(random2))));
    }
    // getRandomString should return the provided default instead of generating a value.
    @Test
    public void shouldNotGenerateRandomStringIfDefaultValuePresent() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        String random1 = OAuthManager.getRandomString("some");
        String random2 = OAuthManager.getRandomString("some");
        assertThat(random1, is("some"));
        assertThat(random2, is("some"));
    }
    // ---- Auth0 account-derived parameters (client_id, telemetry, redirect_uri) ----
    // The account's client id should always be sent as "client_id".
    @Test
    public void shouldHaveClientId() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("client_id", "clientId"));
    }
    // A non-empty "auth0Client" telemetry parameter should always be present.
    @Test
    public void shouldHaveTelemetryInfo() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue(is("auth0Client"), not(isEmptyOrNullString())));
    }
    // The default "redirect_uri" should follow the https://{domain}/android/{package}/callback scheme.
    @Test
    public void shouldHaveRedirectUri() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri.getQueryParameter("redirect_uri"), is("https://domain/android/com.auth0.android.auth0.test/callback"));
    }
    // ---- "response_type" parameter ----
    // The default "response_type" should be "code".
    @Test
    public void shouldHaveDefaultResponseType() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "code"));
    }
    // ResponseType.TOKEN maps to response_type=token.
    @Test
    public void shouldSetResponseTypeToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "token"));
    }
    // ResponseType.ID_TOKEN maps to response_type=id_token.
    @Test
    public void shouldSetResponseTypeIdToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "id_token"));
    }
    // ResponseType.CODE maps to response_type=code.
    @Test
    public void shouldSetResponseTypeCode() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.CODE)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "code"));
    }
    // OR-ed flags CODE|TOKEN map to the space-separated "code token".
    @Test
    public void shouldSetResponseTypeCodeToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.CODE | ResponseType.TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "code token"));
    }
    // OR-ed flags CODE|ID_TOKEN map to the space-separated "code id_token".
    @Test
    public void shouldSetResponseTypeCodeIdToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.CODE | ResponseType.ID_TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "code id_token"));
    }
    // OR-ed flags ID_TOKEN|TOKEN map to the space-separated "id_token token".
    @Test
    public void shouldSetResponseTypeIdTokenToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN | ResponseType.TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "id_token token"));
    }
    // All three flags combined map to the space-separated "code id_token token".
    @Test
    public void shouldSetResponseTypeCodeIdTokenToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.CODE | ResponseType.ID_TOKEN | ResponseType.TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("response_type", "code id_token token"));
    }
    // Entries with null values in the withParameters map should be dropped from the URI.
    @Test
    public void shouldSetNonNullAuthenticationParameters() throws Exception {
        Map<String, Object> parameters = new HashMap<>();
        parameters.put("a", "valid");
        parameters.put("b", null);
        WebAuthProvider.init(account)
                .withParameters(parameters)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("a", "valid"));
        assertThat(uri, not(hasParamWithName("b")));
    }
    // No query parameter in the built URI may carry a null or literal "null" value.
    @Test
    public void shouldBuildAuthorizeURIWithoutNulls() throws Exception {
        WebAuthProvider.init(account)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        Set<String> params = uri.getQueryParameterNames();
        for (String name : params) {
            assertThat(uri, not(hasParamWithValue(name, null)));
            assertThat(uri, not(hasParamWithValue(name, "null")));
        }
    }
    // The authorize URI's scheme, host and path should match the account's authorize URL.
    @Test
    public void shouldBuildAuthorizeURIWithCorrectSchemeHostAndPath() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .withState("a-state")
                .withNonce("a-nonce")
                .start(activity, callback);
        Uri baseUriString = Uri.parse(account.getAuthorizeUrl());
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasScheme(baseUriString.getScheme()));
        assertThat(uri, hasHost(baseUriString.getHost()));
        assertThat(uri, hasPath(baseUriString.getPath()));
    }
    // ID_TOKEN flow: nonce is sent, PKCE code_challenge parameters are not.
    @Test
    public void shouldBuildAuthorizeURIWithResponseTypeIdToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .withState("a-state")
                .withNonce("a-nonce")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, hasParamWithValue("nonce", "a-nonce"));
        assertThat(uri, not(hasParamWithName("code_challenge")));
        assertThat(uri, not(hasParamWithName("code_challenge_method")));
        assertThat(uri, hasParamWithValue("response_type", "id_token"));
    }
    // TOKEN flow: neither nonce nor PKCE code_challenge parameters are sent.
    @Test
    public void shouldBuildAuthorizeURIWithResponseTypeToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.TOKEN)
                .withState("a-state")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, not(hasParamWithName("nonce")));
        assertThat(uri, not(hasParamWithName("code_challenge")));
        assertThat(uri, not(hasParamWithName("code_challenge_method")));
        assertThat(uri, hasParamWithValue("response_type", "token"));
    }
    // CODE flow: PKCE S256 code_challenge parameters are sent, nonce is not.
    @Test
    public void shouldBuildAuthorizeURIWithResponseTypeCode() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.CODE)
                .withState("a-state")
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        assertThat(uri, not(hasParamWithName("nonce")));
        assertThat(uri, hasParamWithValue(is("code_challenge"), not(isEmptyOrNullString())));
        assertThat(uri, hasParamWithValue("code_challenge_method", "S256"));
        assertThat(uri, hasParamWithValue("response_type", "code"));
    }
    // Browser mode with CustomTabsOptions: the Intent should target AuthenticationActivity
    // and carry EXTRA_USE_BROWSER=true plus the given CustomTabsOptions parcelable.
    @SuppressWarnings("deprecation")
    @Test
    public void shouldStartWithBrowserCustomTabsOptions() throws Exception {
        CustomTabsOptions options = mock(CustomTabsOptions.class);
        WebAuthProvider.init(account)
                .withCustomTabsOptions(options)
                .useCodeGrant(false)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Intent intent = intentCaptor.getValue();
        assertThat(intent, is(notNullValue()));
        assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
        assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
        assertThat(intent.getData(), is(nullValue()));
        Bundle extras = intentCaptor.getValue().getExtras();
        assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
        assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
        assertThat((CustomTabsOptions) extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(options));
    }
    // Browser mode without CustomTabsOptions: EXTRA_USE_BROWSER=true and the CT options extra is null.
    @SuppressWarnings("deprecation")
    @Test
    public void shouldStartWithBrowser() throws Exception {
        WebAuthProvider.init(account)
                .useBrowser(true)
                .useCodeGrant(false)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Intent intent = intentCaptor.getValue();
        assertThat(intent, is(notNullValue()));
        assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
        assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
        assertThat(intent.getData(), is(nullValue()));
        Bundle extras = intentCaptor.getValue().getExtras();
        assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
        assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
        assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(nullValue()));
    }
    // WebView mode, no explicit connection: started via startActivityForResult with
    // EXTRA_USE_BROWSER=false, a null connection name extra, and fullscreen disabled.
    @SuppressWarnings("deprecation")
    @Test
    public void shouldStartWithWebViewAndDefaultConnection() throws Exception {
        WebAuthProvider.init(account)
                .useBrowser(false)
                .useCodeGrant(false)
                .useFullscreen(false)
                .start(activity, callback, REQUEST_CODE);
        verify(activity).startActivityForResult(intentCaptor.capture(), any(Integer.class));
        Intent intent = intentCaptor.getValue();
        assertThat(intent, is(notNullValue()));
        assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
        assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
        assertThat(intent.getData(), is(nullValue()));
        Bundle extras = intentCaptor.getValue().getExtras();
        assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(true));
        assertThat(extras.getString(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(nullValue()));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(true));
        assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
        assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(false));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(false));
    }
    // WebView mode with an explicit connection: the connection name and fullscreen flag
    // travel in the Intent extras; browser flag is false and no CT options are attached.
    @SuppressWarnings("deprecation")
    @Test
    public void shouldStartWithWebViewAndCustomConnection() throws Exception {
        WebAuthProvider.init(account)
                .useBrowser(false)
                .withConnection("my-connection")
                .useCodeGrant(false)
                .useFullscreen(true)
                .start(activity, callback);
        verify(activity).startActivityForResult(intentCaptor.capture(), any(Integer.class));
        Intent intent = intentCaptor.getValue();
        assertThat(intent, is(notNullValue()));
        assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
        assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
        assertThat(intent.getData(), is(nullValue()));
        Bundle extras = intent.getExtras();
        assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(true));
        assertThat(extras.getString(AuthenticationActivity.EXTRA_CONNECTION_NAME), is("my-connection"));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(true));
        assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(true));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
        assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(false));
        assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(false));
    }
    // ID_TOKEN flow resumed via (requestCode, resultCode, intent): echoing back the sent
    // state and a JWT carrying the sent nonce should yield a success callback.
    @SuppressWarnings("deprecation")
    @Test
    public void shouldResumeWithRequestCodeWithResponseTypeIdToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .start(activity, callback, REQUEST_CODE);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        String sentState = uri.getQueryParameter(KEY_STATE);
        String sentNonce = uri.getQueryParameter(KEY_NONCE);
        assertThat(sentState, is(not(isEmptyOrNullString())));
        assertThat(sentNonce, is(not(isEmptyOrNullString())));
        Intent intent = createAuthIntent(createHash(customNonceJWT(sentNonce), null, null, null, null, sentState, null, null));
        assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
        verify(callback).onSuccess(any(Credentials.class));
    }
    // ID_TOKEN flow resumed via Intent only: matching state and nonce should yield a success callback.
    @Test
    public void shouldResumeWithIntentWithResponseTypeIdToken() throws Exception {
        WebAuthProvider.init(account)
                .withResponseType(ResponseType.ID_TOKEN)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        String sentState = uri.getQueryParameter(KEY_STATE);
        String sentNonce = uri.getQueryParameter(KEY_NONCE);
        assertThat(sentState, is(not(isEmptyOrNullString())));
        assertThat(sentNonce, is(not(isEmptyOrNullString())));
        Intent intent = createAuthIntent(createHash(customNonceJWT(sentNonce), null, null, null, null, sentState, null, null));
        assertTrue(WebAuthProvider.resume(intent));
        verify(callback).onSuccess(any(Credentials.class));
    }
@SuppressWarnings("deprecation")
@Test
public void shouldStartWithValidRequestCode() throws Exception {
final Credentials credentials = Mockito.mock(Credentials.class);
PKCE pkce = Mockito.mock(PKCE.class);
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callback.onSuccess(credentials);
return null;
}
}).when(pkce).getToken(any(String.class), eq(callback));
WebAuthProvider.init(account)
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
int DEFAULT_REQUEST_CODE = 110;
assertTrue(WebAuthProvider.resume(DEFAULT_REQUEST_CODE, Activity.RESULT_OK, intent));
}
    // Code-grant flow resumed via Intent: the URL fragment values are discarded and the
    // credentials produced by the (mocked) PKCE code exchange are delivered instead.
    @SuppressWarnings("deprecation")
    @Test
    public void shouldResumeWithIntentWithCodeGrant() throws Exception {
        Date expiresAt = new Date();
        final Credentials codeCredentials = new Credentials("codeId", "codeAccess", "codeType", "codeRefresh", expiresAt, "codeScope");
        PKCE pkce = Mockito.mock(PKCE.class);
        // Route the captured callback straight to the canned code-exchange credentials.
        Mockito.doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocation) throws Throwable {
                callbackCaptor.getValue().onSuccess(codeCredentials);
                return null;
            }
        }).when(pkce).getToken(any(String.class), callbackCaptor.capture());
        WebAuthProvider.init(account)
                .useCodeGrant(true)
                .withPKCE(pkce)
                .start(activity, callback);
        verify(activity).startActivity(intentCaptor.capture());
        Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
        assertThat(uri, is(notNullValue()));
        String sentState = uri.getQueryParameter(KEY_STATE);
        assertThat(sentState, is(not(isEmptyOrNullString())));
        Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
        assertTrue(WebAuthProvider.resume(intent));
        ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
        verify(callback).onSuccess(credentialsCaptor.capture());
        assertThat(credentialsCaptor.getValue(), is(notNullValue()));
        assertThat(credentialsCaptor.getValue().getIdToken(), is("codeId"));
        assertThat(credentialsCaptor.getValue().getAccessToken(), is("codeAccess"));
        assertThat(credentialsCaptor.getValue().getRefreshToken(), is("codeRefresh"));
        assertThat(credentialsCaptor.getValue().getType(), is("codeType"));
        assertThat(credentialsCaptor.getValue().getExpiresAt(), is(expiresAt));
        assertThat(credentialsCaptor.getValue().getScope(), is("codeScope"));
    }
// Same as shouldResumeWithIntentWithCodeGrant but exercising the deprecated
// resume(requestCode, resultCode, Intent) overload paired with
// start(activity, callback, REQUEST_CODE).
@SuppressWarnings("deprecation")
@Test
public void shouldResumeWithRequestCodeWithCodeGrant() throws Exception {
Date expiresAt = new Date();
final Credentials codeCredentials = new Credentials("codeId", "codeAccess", "codeType", "codeRefresh", expiresAt, "codeScope");
PKCE pkce = Mockito.mock(PKCE.class);
// Any getToken(...) call succeeds synchronously with codeCredentials.
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callbackCaptor.getValue().onSuccess(codeCredentials);
return null;
}
}).when(pkce).getToken(any(String.class), callbackCaptor.capture());
WebAuthProvider.init(account)
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback, REQUEST_CODE);
// Recover the generated state so the fake redirect hash can echo it back.
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
// The code* values prove the PKCE exchange result won over the hash values.
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getIdToken(), is("codeId"));
assertThat(credentialsCaptor.getValue().getAccessToken(), is("codeAccess"));
assertThat(credentialsCaptor.getValue().getRefreshToken(), is("codeRefresh"));
assertThat(credentialsCaptor.getValue().getType(), is("codeType"));
assertThat(credentialsCaptor.getValue().getExpiresAt(), is(expiresAt));
assertThat(credentialsCaptor.getValue().getScope(), is("codeScope"));
}
// Implicit grant (useCodeGrant(false)): no code exchange takes place, so the
// token values parsed straight from the redirect URI fragment must reach the
// callback unchanged.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeWithIntentWithImplicitGrant() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback);
// Recover the generated state so the fake redirect hash can echo it back.
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getIdToken(), is("urlId"));
assertThat(credentialsCaptor.getValue().getAccessToken(), is("urlAccess"));
assertThat(credentialsCaptor.getValue().getRefreshToken(), is("urlRefresh"));
assertThat(credentialsCaptor.getValue().getType(), is("urlType"));
assertThat(credentialsCaptor.getValue().getExpiresIn(), is(1111L));
}
// Implicit-grant counterpart using the deprecated
// resume(requestCode, resultCode, Intent) overload.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeWithRequestCodeWithImplicitGrant() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
// Recover the generated state so the fake redirect hash can echo it back.
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getIdToken(), is("urlId"));
assertThat(credentialsCaptor.getValue().getAccessToken(), is("urlAccess"));
assertThat(credentialsCaptor.getValue().getRefreshToken(), is("urlRefresh"));
assertThat(credentialsCaptor.getValue().getType(), is("urlType"));
assertThat(credentialsCaptor.getValue().getExpiresIn(), is(1111L));
}
// Cancellation through the request-code resume variant: an Intent without
// data plus RESULT_CANCELED must be consumed (resume returns true) and
// surface the "authentication canceled" error on the callback.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeWithRequestCodeWhenResultCancelled() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent canceledIntent = createAuthIntent(null);
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_CANCELED, canceledIntent));
verify(callback).onFailure(authExceptionCaptor.capture());
AuthenticationException error = authExceptionCaptor.getValue();
assertThat(error, is(notNullValue()));
assertThat(error.getCode(), is("a0.authentication_canceled"));
assertThat(error.getDescription(), is("The user closed the browser app and the authentication was canceled."));
}
// Cancellation through resume(Intent): createAuthIntent(null) yields an
// Intent with no data, which the provider maps to the canceled error.
@SuppressWarnings("deprecation")
@Test
public void shouldResumeWithIntentWhenResultCancelled() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(null);
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.authentication_canceled"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The user closed the browser app and the authentication was canceled."));
}
// Pins the expiresAt computation: with a fixed clock (CURRENT_TIME_MS) and a
// hash carrying expires_in=1111 seconds, the resulting Credentials must
// expire at CURRENT_TIME_MS + 1111 * 1000 milliseconds.
@Test
public void shouldCalculateExpiresAtDateOnResumeAuthentication() throws Exception {
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
// Freeze the manager's clock so the expected timestamp is deterministic.
OAuthManager managerInstance = (OAuthManager) WebAuthProvider.getManagerInstance();
managerInstance.setCurrentTimeInMillis(CURRENT_TIME_MS);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
String sentState = uri.getQueryParameter(KEY_STATE);
assertThat(sentState, is(not(isEmptyOrNullString())));
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, sentState, null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
ArgumentCaptor<Credentials> credentialsCaptor = ArgumentCaptor.forClass(Credentials.class);
verify(callback).onSuccess(credentialsCaptor.capture());
assertThat(credentialsCaptor.getValue(), is(notNullValue()));
// expires_in is in seconds; the stored instant is clock + seconds-to-ms.
long expirationTime = CURRENT_TIME_MS + 1111L * 1000;
assertThat(credentialsCaptor.getValue().getExpiresAt(), is(notNullValue()));
assertThat(credentialsCaptor.getValue().getExpiresAt().getTime(), is(expirationTime));
}
// When the PKCE code exchange fails with a Dialog, that exact Dialog must be
// forwarded to the callback's onFailure unchanged.
@SuppressWarnings("deprecation")
@Test
public void shouldReThrowAnyFailedCodeExchangeDialog() throws Exception {
final Dialog dialog = Mockito.mock(Dialog.class);
PKCE pkce = Mockito.mock(PKCE.class);
// Any getToken(...) call fails synchronously with the mocked dialog.
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callbackCaptor.getValue().onFailure(dialog);
return null;
}
}).when(pkce).getToken(any(String.class), callbackCaptor.capture());
// Fixed state "1234567890" matches the state inside the fake redirect hash.
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(dialog);
}
// When the PKCE code exchange fails with an AuthenticationException, that
// exact exception must be forwarded to the callback's onFailure unchanged.
@SuppressWarnings("deprecation")
@Test
public void shouldReThrowAnyFailedCodeExchangeException() throws Exception {
final AuthenticationException exception = Mockito.mock(AuthenticationException.class);
PKCE pkce = Mockito.mock(PKCE.class);
// Any getToken(...) call fails synchronously with the mocked exception.
Mockito.doAnswer(new Answer() {
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
callbackCaptor.getValue().onFailure(exception);
return null;
}
}).when(pkce).getToken(any(String.class), callbackCaptor.capture());
// Fixed state "1234567890" matches the state inside the fake redirect hash.
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(true)
.withPKCE(pkce)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("urlId", "urlAccess", "urlRefresh", "urlType", 1111L, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(exception);
}
// A redirect hash carrying error=access_denied must produce an
// access_denied failure with the fixed "Permissions were not granted"
// description.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithIntentWithAccessDenied() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "access_denied", null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Permissions were not granted. Try again."));
}
// access_denied handling via the deprecated request-code resume overload.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithRequestCodeWithAccessDenied() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE)
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "access_denied", null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Permissions were not granted. Try again."));
}
// error=unauthorized with an error_description: the custom rule message must
// be passed through verbatim as the failure description.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithIntentWithRuleError() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "unauthorized", "Custom Rule Error"));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("unauthorized"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Custom Rule Error"));
}
// Rule-error pass-through via the deprecated request-code resume overload.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithRequestCodeWithRuleError() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "unauthorized", "Custom Rule Error"));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("unauthorized"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Custom Rule Error"));
}
// Any unrecognized error value in the hash is mapped to the generic
// a0.invalid_configuration failure.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithIntentWithConfigurationInvalid() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "some other error", null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.invalid_configuration"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The application isn't configured properly for the social connection. Please check your Auth0's application configuration"));
}
// Unknown-error mapping via the deprecated request-code resume overload.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithRequestCodeWithConfigurationInvalid() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "some other error", null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.invalid_configuration"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The application isn't configured properly for the social connection. Please check your Auth0's application configuration"));
}
// error=login_required: code and description from the hash are surfaced
// as-is on the failure callback.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithIntentWithLoginRequired() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "login_required", "Login Required"));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("login_required"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Login Required"));
}
// login_required handling via the deprecated request-code resume overload.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithRequestCodeWithLoginRequired() throws Exception {
WebAuthProvider.init(account)
.withState("1234567890")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", "login_required", "Login Required"));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("login_required"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("Login Required"));
}
// CSRF protection: the configured state ("abcdefghijk") differs from the one
// echoed back in the hash ("1234567890"), so resume must fail with the
// invalid-state error.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithIntentWithInvalidState() throws Exception {
WebAuthProvider.init(account)
.withState("abcdefghijk")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The received state is invalid. Try again."));
}
// State-mismatch rejection via the deprecated request-code resume overload.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithRequestCodeWithInvalidState() throws Exception {
WebAuthProvider.init(account)
.withState("abcdefghijk")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The received state is invalid. Try again."));
}
// Nonce validation for response_type=id_token: the hard-coded id_token below
// is a JWT whose payload nonce (presumably "1234567890" when decoded) does
// not match the configured nonce "0987654321", so resume must fail.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithIntentWithInvalidNonce() throws Exception {
WebAuthProvider.init(account)
.withState("state")
.withNonce("0987654321")
.withResponseType(ResponseType.ID_TOKEN)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJub25jZSI6IjEyMzQ1Njc4OTAifQ.oUb6xFIEPJQrFbel_Js4SaOwpFfM_kxHxI7xDOHgghk", null, null, null, null, "state", null, null));
assertTrue(WebAuthProvider.resume(intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The received nonce is invalid. Try again."));
}
// Nonce-mismatch rejection via the deprecated request-code resume overload.
@SuppressWarnings({"deprecation", "ThrowableResultOfMethodCallIgnored"})
@Test
public void shouldFailToResumeWithRequestCodeWithInvalidNonce() throws Exception {
WebAuthProvider.init(account)
.withState("state")
.withNonce("0987654321")
.withResponseType(ResponseType.ID_TOKEN)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJub25jZSI6IjEyMzQ1Njc4OTAifQ.oUb6xFIEPJQrFbel_Js4SaOwpFfM_kxHxI7xDOHgghk", null, null, null, null, "state", null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("access_denied"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("The received nonce is invalid. Try again."));
}
// A request code (999) that does not match any pending login must be ignored
// entirely: resume returns false and the callback is never touched.
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeWithUnexpectedRequestCode() throws Exception {
verifyNoMoreInteractions(callback);
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
assertFalse(WebAuthProvider.resume(999, Activity.RESULT_OK, intent));
}
// A result code (999) that is neither RESULT_OK nor RESULT_CANCELED must be
// ignored: resume returns false and the callback is never touched.
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeWithResultNotOK() throws Exception {
verifyNoMoreInteractions(callback);
WebAuthProvider.init(account)
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
assertFalse(WebAuthProvider.resume(REQUEST_CODE, 999, intent));
}
// A redirect with an empty fragment carries no tokens, no state and no
// error, so resume(Intent) must reject it without invoking the callback.
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeWithIntentWithEmptyUriValues() throws Exception {
verifyNoMoreInteractions(callback);
WebAuthProvider.init(account)
.withState("abcdefghijk")
.useCodeGrant(false)
.start(activity, callback);
Intent intent = createAuthIntent("");
assertFalse(WebAuthProvider.resume(intent));
}
// Empty-fragment rejection via the deprecated request-code resume overload.
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeWithRequestCodeWithEmptyUriValues() throws Exception {
verifyNoMoreInteractions(callback);
WebAuthProvider.init(account)
.withState("abcdefghijk")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
Intent intent = createAuthIntent("");
assertFalse(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
}
// Calling resume(Intent) before any WebAuthProvider.init(...): there is no
// pending authentication, so the Intent must be rejected.
@Test
public void shouldFailToResumeWithIntentWithoutFirstInitProvider() throws Exception {
Intent emptyResultIntent = createAuthIntent("");
assertFalse(WebAuthProvider.resume(emptyResultIntent));
}
// Same no-pending-authentication rejection through the deprecated
// request-code resume overload.
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeWithRequestCodeWithoutFirstInitProvider() throws Exception {
Intent intent = createAuthIntent("");
assertFalse(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
}
// NOTE: renamed from shouldResumeWithIntentWithNullIntent. The body asserts
// that resume(...) FAILS (assertFalse), so the old name contradicted the
// behavior and broke the shouldFailToResume* naming pattern used by the
// sibling shouldFailToResumeWithRequestCodeWithNullIntent below.
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeWithIntentWithNullIntent() throws Exception {
WebAuthProvider.init(account)
.withState("abcdefghijk")
.useCodeGrant(false)
.start(activity, callback);
// A null Intent carries no redirect data, so resume must reject it.
assertFalse(WebAuthProvider.resume(null));
}
// A null Intent must be rejected by the deprecated request-code resume
// overload as well.
@SuppressWarnings("deprecation")
@Test
public void shouldFailToResumeWithRequestCodeWithNullIntent() throws Exception {
WebAuthProvider.init(account)
.withState("abcdefghijk")
.useCodeGrant(false)
.start(activity, callback, REQUEST_CODE);
assertFalse(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, null));
}
// The static manager instance must be released once resume(Intent) has
// consumed a successful redirect, so no state leaks between logins.
@Test
public void shouldClearInstanceAfterSuccessAuthenticationWithIntent() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback);
assertThat(WebAuthProvider.getManagerInstance(), is(notNullValue()));
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(intent));
assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
// Manager-instance cleanup through the deprecated request-code resume
// overload.
@SuppressWarnings("deprecation")
@Test
public void shouldClearInstanceAfterSuccessAuthenticationWithRequestCode() throws Exception {
WebAuthProvider.init(account)
.start(activity, callback, REQUEST_CODE);
assertThat(WebAuthProvider.getManagerInstance(), is(notNullValue()));
Intent intent = createAuthIntent(createHash("iToken", "aToken", null, "refresh_token", null, "1234567890", null, null));
assertTrue(WebAuthProvider.resume(REQUEST_CODE, Activity.RESULT_OK, intent));
assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
// Browser mode with no browser app resolvable: start must fail immediately
// with a0.browser_not_available and leave no manager instance behind.
@Test
public void shouldFailToStartWithBrowserWhenNoBrowserAppIsInstalled() throws Exception {
prepareBrowserApp(false, null);
WebAuthProvider.init(account)
.useBrowser(true)
.start(activity, callback);
verify(callback).onFailure(authExceptionCaptor.capture());
assertThat(authExceptionCaptor.getValue(), is(notNullValue()));
assertThat(authExceptionCaptor.getValue().getCode(), is("a0.browser_not_available"));
assertThat(authExceptionCaptor.getValue().getDescription(), is("No Browser application installed to perform web authentication."));
assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
// WebView mode does not need a browser app: even with none installed, start
// must launch AuthenticationActivity via startActivityForResult and never
// report a failure.
@SuppressWarnings("deprecation")
@Test
public void shouldNotFailToStartWithWebviewWhenNoBrowserAppIsInstalled() throws Exception {
prepareBrowserApp(false, null);
WebAuthProvider.init(account)
.useBrowser(false)
.start(activity, callback, REQUEST_CODE);
verify(activity).startActivityForResult(intentCaptor.capture(), any(Integer.class));
Intent intent = intentCaptor.getValue();
assertThat(intent, is(notNullValue()));
assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
assertThat(intent.getData(), is(nullValue()));
verify(callback, never()).onFailure(any(AuthenticationException.class));
}
// hasBrowserAppInstalled must report true when the PackageManager resolves a
// browser, and must probe with an ACTION_VIEW Intent pointing at a valid URL.
@Test
public void shouldHaveBrowserAppInstalled() {
ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
prepareBrowserApp(true, intentCaptor);
boolean hasBrowserApp = WebAuthProvider.hasBrowserAppInstalled(activity.getPackageManager());
MatcherAssert.assertThat(hasBrowserApp, Is.is(true));
MatcherAssert.assertThat(intentCaptor.getValue(), Is.is(IntentMatchers.hasAction(Intent.ACTION_VIEW)));
MatcherAssert.assertThat(URLUtil.isValidUrl(intentCaptor.getValue().getDataString()), Is.is(true));
}
// Negative counterpart: no resolvable browser → false, while still probing
// with a well-formed ACTION_VIEW Intent.
@Test
public void shouldNotHaveBrowserAppInstalled() {
ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
prepareBrowserApp(false, intentCaptor);
boolean hasBrowserApp = WebAuthProvider.hasBrowserAppInstalled(activity.getPackageManager());
MatcherAssert.assertThat(hasBrowserApp, Is.is(false));
MatcherAssert.assertThat(intentCaptor.getValue(), Is.is(IntentMatchers.hasAction(Intent.ACTION_VIEW)));
MatcherAssert.assertThat(URLUtil.isValidUrl(intentCaptor.getValue().getDataString()), Is.is(true));
}
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
//** LOG OUT FEATURE **//
//** ** ** ** ** ** **//
//** ** ** ** ** ** **//
// logout(account).start(...) must register a manager instance, mirroring the
// login flow's init behavior.
@Test
public void shouldInitLogoutWithAccount() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
assertNotNull(WebAuthProvider.getManagerInstance());
}
//scheme
// Without an explicit scheme, the logout returnTo URL must default to https.
@Test
public void shouldHaveDefaultSchemeOnLogout() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithName("returnTo"));
Uri returnToUri = Uri.parse(uri.getQueryParameter("returnTo"));
assertThat(returnToUri, hasScheme("https"));
}
// withScheme("myapp") must be reflected in the returnTo URL's scheme.
@Test
public void shouldSetSchemeOnLogout() throws Exception {
WebAuthProvider.logout(account)
.withScheme("myapp")
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithName("returnTo"));
Uri returnToUri = Uri.parse(uri.getQueryParameter("returnTo"));
assertThat(returnToUri, hasScheme("myapp"));
}
// client id
// The logout URL must always carry the account's client_id query parameter.
@Test
public void shouldAlwaysSetClientIdOnLogout() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue("client_id", "clientId"));
}
// auth0 related
// The logout URL must include a non-empty auth0Client telemetry parameter.
@Test
public void shouldHaveTelemetryInfoOnLogout() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri, hasParamWithValue(is("auth0Client"), not(isEmptyOrNullString())));
}
// The default returnTo value is derived from the domain and the app's
// package name (test package: com.auth0.android.auth0.test).
@Test
public void shouldHaveReturnToUriOnLogout() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
assertThat(uri.getQueryParameter("returnTo"), is("https://domain/android/com.auth0.android.auth0.test/callback"));
}
// Launch log out
// A plain logout must launch AuthenticationActivity in browser mode with the
// expected extras: authorize URI set, browser flag true, and a CT_OPTIONS
// key present but holding a null Parcelable when no options were given.
@SuppressWarnings("deprecation")
@Test
public void shouldStartLogout() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Intent intent = intentCaptor.getValue();
assertThat(intent, is(notNullValue()));
assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
assertThat(intent.getData(), is(nullValue()));
Bundle extras = intentCaptor.getValue().getExtras();
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(nullValue()));
}
// Same launch contract as shouldStartLogout, but the provided
// CustomTabsOptions instance must be forwarded via the CT_OPTIONS extra.
@Test
public void shouldStartLogoutWithCustomTabsOptions() throws Exception {
CustomTabsOptions options = mock(CustomTabsOptions.class);
WebAuthProvider.logout(account)
.withCustomTabsOptions(options)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Intent intent = intentCaptor.getValue();
assertThat(intent, is(notNullValue()));
assertThat(intent, hasComponent(AuthenticationActivity.class.getName()));
assertThat(intent, hasFlag(Intent.FLAG_ACTIVITY_CLEAR_TOP));
assertThat(intent.getData(), is(nullValue()));
Bundle extras = intentCaptor.getValue().getExtras();
assertThat(extras.getParcelable(AuthenticationActivity.EXTRA_AUTHORIZE_URI), is(notNullValue()));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CONNECTION_NAME), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_FULL_SCREEN), is(false));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat(extras.containsKey(AuthenticationActivity.EXTRA_CT_OPTIONS), is(true));
assertThat(extras.getBoolean(AuthenticationActivity.EXTRA_USE_BROWSER), is(true));
assertThat((CustomTabsOptions) extras.getParcelable(AuthenticationActivity.EXTRA_CT_OPTIONS), is(options));
}
// Logout requires a browser: with none resolvable, start must fail with an
// Auth0Exception wrapping an ActivityNotFoundException, and the manager
// instance must be cleared.
@Test
public void shouldFailToStartLogoutWhenNoBrowserAppIsInstalled() throws Exception {
prepareBrowserApp(false, null);
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(voidCallback).onFailure(auth0ExceptionCaptor.capture());
assertThat(auth0ExceptionCaptor.getValue(), is(notNullValue()));
assertThat(auth0ExceptionCaptor.getValue().getMessage(), is("Cannot perform web log out"));
Throwable cause = auth0ExceptionCaptor.getValue().getCause();
assertThat(cause, is(CoreMatchers.<Throwable>instanceOf(ActivityNotFoundException.class)));
assertThat(cause.getMessage(), is("No Browser application installed."));
assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
// A logout redirect with an empty fragment counts as success: resume must
// consume it and complete the void callback.
@Test
public void shouldResumeLogoutSuccessfullyWithIntent() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
Intent intent = createAuthIntent("");
assertTrue(WebAuthProvider.resume(intent));
verify(voidCallback).onSuccess(any(Void.class));
}
// A data-less Intent during logout means the user closed the browser: the
// void callback must fail with the cancellation message and the manager
// instance must be cleared.
@Test
public void shouldResumeLogoutFailingWithIntent() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
verify(activity).startActivity(intentCaptor.capture());
Uri uri = intentCaptor.getValue().getParcelableExtra(AuthenticationActivity.EXTRA_AUTHORIZE_URI);
assertThat(uri, is(notNullValue()));
//null data translates to result canceled
Intent intent = createAuthIntent(null);
assertTrue(WebAuthProvider.resume(intent));
verify(voidCallback).onFailure(auth0ExceptionCaptor.capture());
assertThat(auth0ExceptionCaptor.getValue(), is(notNullValue()));
assertThat(auth0ExceptionCaptor.getValue().getMessage(), is("The user closed the browser app so the logout was cancelled."));
assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
// The manager instance must also be released after a successful logout.
@Test
public void shouldClearLogoutManagerInstanceAfterSuccessfulLogout() throws Exception {
WebAuthProvider.logout(account)
.start(activity, voidCallback);
assertThat(WebAuthProvider.getManagerInstance(), is(notNullValue()));
Intent intent = createAuthIntent("");
assertTrue(WebAuthProvider.resume(intent));
assertThat(WebAuthProvider.getManagerInstance(), is(nullValue()));
}
//Test Helper Functions
/**
 * Builds the Intent that the browser redirect would deliver back to the app.
 * A {@code null} hash yields an Intent without data (the "user closed the
 * browser" case); otherwise the callback URL with the given fragment
 * appended is set as the Intent data.
 */
private Intent createAuthIntent(@Nullable String hash) {
Intent resultIntent = new Intent();
if (hash != null) {
resultIntent.setData(Uri.parse("https://domain.auth0.com/android/package/callback" + hash));
}
return resultIntent;
}
// Stubs the Activity's PackageManager so that resolving a browser Intent
// either finds a fake browser app (isAppInstalled == true) or nothing.
// When a captor is supplied, the Intent passed to resolveActivity is
// recorded so callers can inspect it afterwards.
private void prepareBrowserApp(boolean isAppInstalled, @Nullable ArgumentCaptor<Intent> intentCaptor) {
PackageManager pm = mock(PackageManager.class);
ResolveInfo info = null;
if (isAppInstalled) {
info = mock(ResolveInfo.class);
ApplicationInfo appInfo = mock(ApplicationInfo.class);
appInfo.packageName = "com.auth0.test";
ActivityInfo actInfo = mock(ActivityInfo.class);
actInfo.applicationInfo = appInfo;
actInfo.name = "Auth0 Browser";
info.activityInfo = actInfo;
}
// Both ternary branches are Mockito argument matchers (capture() vs
// any(Intent.class)) — mixing a raw value with a matcher here would be
// illegal, hence the conditional matcher instead of a plain argument.
when(pm.resolveActivity(intentCaptor != null ? intentCaptor.capture() : any(Intent.class), eq(PackageManager.MATCH_DEFAULT_ONLY))).thenReturn(info);
when(activity.getPackageManager()).thenReturn(pm);
}
private String createHash(@Nullable String idToken, @Nullable String accessToken, @Nullable String refreshToken, @Nullable String tokenType, @Nullable Long expiresIn, @Nullable String state, @Nullable String error, @Nullable String errorDescription) {
String hash = "#";
if (accessToken != null) {
hash = hash.concat("access_token=")
.concat(accessToken)
.concat("&");
}
if (idToken != null) {
hash = hash.concat("id_token=")
.concat(idToken)
.concat("&");
}
if (refreshToken != null) {
hash = hash.concat("refresh_token=")
.concat(refreshToken)
.concat("&");
}
if (tokenType != null) {
hash = hash.concat("token_type=")
.concat(tokenType)
.concat("&");
}
if (expiresIn != null) {
hash = hash.concat("expires_in=")
.concat(String.valueOf(expiresIn))
.concat("&");
}
if (state != null) {
hash = hash.concat("state=")
.concat(state)
.concat("&");
}
if (error != null) {
hash = hash.concat("error=")
.concat(error)
.concat("&");
}
if (errorDescription != null) {
hash = hash.concat("error_description=")
.concat(errorDescription)
.concat("&");
}
if (hash.endsWith("&")) {
hash = hash.substring(0, hash.length() - 1);
}
return hash.length() == 1 ? "" : hash;
}
private String customNonceJWT(@NonNull String nonce) {
String header = encodeString("{}");
String bodyBuilder = "{\"nonce\":\"" + nonce + "\"}";
String body = encodeString(bodyBuilder);
String signature = "sign";
return String.format("%s.%s.%s", header, body, signature);
}
private String encodeString(String source) {
byte[] bytes = Base64.encode(source.getBytes(), Base64.URL_SAFE | Base64.NO_WRAP | Base64.NO_PADDING);
String res = "";
try {
res = new String(bytes, "UTF-8");
} catch (UnsupportedEncodingException ignored) {
}
return res;
}
} | rename authentication tests using a prefix/sufix
| auth0/src/test/java/com/auth0/android/provider/WebAuthProviderTest.java | rename authentication tests using a prefix/sufix |
|
Java | cc0-1.0 | 42bd874567248bef3b93ec1fbe3aa1679fa04e2b | 0 | HogeschoolLeiden/website,HogeschoolLeiden/website,HogeschoolLeiden/website,HogeschoolLeiden/website | package nl.hsleiden.components.catalog;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.jcr.RepositoryException;
import nl.hsleiden.componentsinfo.ContactPersonsInfo;
import nl.hsleiden.utils.Constants.Attributes;
import nl.hsleiden.utils.Constants.WidgetConstants;
import org.hippoecm.hst.content.beans.standard.HippoBean;
import org.hippoecm.hst.core.component.HstComponentException;
import org.hippoecm.hst.core.component.HstRequest;
import org.hippoecm.hst.core.component.HstResponse;
import org.hippoecm.hst.core.parameters.ParametersInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.tdclighthouse.prototype.components.AjaxEnabledComponent;
import com.tdclighthouse.prototype.utils.BeanUtils;
@ParametersInfo(type = ContactPersonsInfo.class)
public class ContactPersons extends AjaxEnabledComponent {
private static final Logger LOG = LoggerFactory.getLogger(ContactPersons.class);
public Map<String, Object> getModel(HstRequest request, HstResponse response) {
try {
ContactPersonsInfo parametersInfo = getConfiguration(request);
return populateModel(request, parametersInfo);
} catch (RepositoryException e) {
LOG.error(e.getMessage(), e);
throw new HstComponentException(e.getMessage(), e);
}
}
protected Map<String, Object> populateModel(HstRequest request, ContactPersonsInfo parametersInfo) {
Map<String, Object> model = new HashMap<String, Object>();
model.put("info", parametersInfo);
addItemsToModel(request, model, parametersInfo);
return model;
}
private void addItemsToModel(HstRequest request, Map<String, Object> model, ContactPersonsInfo parametersInfo) {
List<HippoBean> items = new ArrayList<HippoBean>();
//items should be added only if not null
items.add(BeanUtils.getBeanViaAbsolutionPath(parametersInfo.getFirstContact(), request));
items.add(BeanUtils.getBeanViaAbsolutionPath(parametersInfo.getSecondContact(), request));
items.add(BeanUtils.getBeanViaAbsolutionPath(parametersInfo.getThirdContact(), request));
if (!items.isEmpty()) {
model.put(Attributes.ITEMS, items);
} else {
request.setAttribute(WidgetConstants.WEB_MASTER_MESSAGE, "webmaster.nocontacts.message");
}
}
private ContactPersonsInfo getConfiguration(HstRequest request) throws RepositoryException {
ContactPersonsInfo paramInfo = this.<ContactPersonsInfo> getComponentParametersInfo(request);
return paramInfo;
}
}
| site/src/main/java/nl/hsleiden/components/catalog/ContactPersons.java | package nl.hsleiden.components.catalog;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.jcr.RepositoryException;
import nl.hsleiden.componentsinfo.ContactPersonsInfo;
import nl.hsleiden.utils.HslUtils;
import nl.hsleiden.utils.Constants.Attributes;
import nl.hsleiden.utils.Constants.WidgetConstants;
import org.hippoecm.hst.content.beans.standard.HippoBean;
import org.hippoecm.hst.core.component.HstComponentException;
import org.hippoecm.hst.core.component.HstRequest;
import org.hippoecm.hst.core.component.HstResponse;
import org.hippoecm.hst.core.parameters.ParametersInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.tdclighthouse.prototype.components.AjaxEnabledComponent;
@ParametersInfo(type = ContactPersonsInfo.class)
public class ContactPersons extends AjaxEnabledComponent {
private static final Logger LOG = LoggerFactory.getLogger(ContactPersons.class);
public Map<String, Object> getModel(HstRequest request, HstResponse response) {
try {
ContactPersonsInfo parametersInfo = getConfiguration(request);
return populateModel(request, parametersInfo);
} catch (RepositoryException e) {
LOG.error(e.getMessage(), e);
throw new HstComponentException(e.getMessage(), e);
}
}
protected Map<String, Object> populateModel(HstRequest request, ContactPersonsInfo parametersInfo) {
Map<String, Object> model = new HashMap<String, Object>();
model.put("info", parametersInfo);
addItemsToModel(request, model, parametersInfo);
return model;
}
private void addItemsToModel(HstRequest request, Map<String, Object> model, ContactPersonsInfo parametersInfo) {
List<HippoBean> items = new ArrayList<HippoBean>();
//items should be added only if not null
items.add(HslUtils.getSelectedBean(request, parametersInfo.getFirstContact()));
items.add(HslUtils.getSelectedBean(request, parametersInfo.getSecondContact()));
items.add(HslUtils.getSelectedBean(request, parametersInfo.getThirdContact()));
if (!items.isEmpty()) {
model.put(Attributes.ITEMS, items);
} else {
request.setAttribute(WidgetConstants.WEB_MASTER_MESSAGE, "webmaster.nocontacts.message");
}
}
private ContactPersonsInfo getConfiguration(HstRequest request) throws RepositoryException {
ContactPersonsInfo paramInfo = this.<ContactPersonsInfo> getComponentParametersInfo(request);
return paramInfo;
}
}
| fix compilation failure after merge
| site/src/main/java/nl/hsleiden/components/catalog/ContactPersons.java | fix compilation failure after merge |
|
Java | epl-1.0 | 5b447f9bb376665fe7dc9471a5810995fcc43fdc | 0 | sonatype/sisu,sonatype/sisu | /*******************************************************************************
* Copyright (c) 2010-2011 Sonatype, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
* The Apache License v2.0 is available at
* http://www.apache.org/licenses/LICENSE-2.0.html
* You may elect to redistribute this code under either of these licenses.
*******************************************************************************/
package org.codehaus.plexus;
import java.net.URL;
import java.util.Map;
import org.codehaus.plexus.classworlds.ClassWorld;
import org.codehaus.plexus.classworlds.realm.ClassRealm;
public final class DefaultContainerConfiguration
implements ContainerConfiguration
{
// ----------------------------------------------------------------------
// Implementation fields
// ----------------------------------------------------------------------
private String configurationPath;
private URL configurationUrl;
private ClassWorld classWorld;
private ClassRealm classRealm;
private Map<Object, Object> contextData;
private String componentVisibility = PlexusConstants.REALM_VISIBILITY;
private String classPathScanning = PlexusConstants.SCANNING_OFF;
private boolean autoWiring;
// ----------------------------------------------------------------------
// Public methods
// ----------------------------------------------------------------------
public ContainerConfiguration setName( final String name )
{
return this;
}
public ContainerConfiguration setContainerConfiguration( final String configurationPath )
{
this.configurationPath = configurationPath;
return this;
}
public String getContainerConfiguration()
{
return configurationPath;
}
public ContainerConfiguration setContainerConfigurationURL( final URL configurationUrl )
{
this.configurationUrl = configurationUrl;
return this;
}
public URL getContainerConfigurationURL()
{
return configurationUrl;
}
public ContainerConfiguration setClassWorld( final ClassWorld classWorld )
{
this.classWorld = classWorld;
return this;
}
public ClassWorld getClassWorld()
{
return classWorld;
}
public ContainerConfiguration setRealm( final ClassRealm classRealm )
{
this.classRealm = classRealm;
return this;
}
public ClassRealm getRealm()
{
return classRealm;
}
public ContainerConfiguration setContext( final Map<Object, Object> contextData )
{
this.contextData = contextData;
return this;
}
public Map<Object, Object> getContext()
{
return contextData;
}
public ContainerConfiguration setComponentVisibility( final String componentVisibility )
{
this.componentVisibility = componentVisibility;
return this;
}
public String getComponentVisibility()
{
return componentVisibility;
}
public ContainerConfiguration setClassPathScanning( final String classPathScanning )
{
this.classPathScanning = classPathScanning;
if ( !PlexusConstants.SCANNING_OFF.equalsIgnoreCase( classPathScanning ) )
{
autoWiring = true;
}
return this;
}
public String getClassPathScanning()
{
return classPathScanning;
}
public ContainerConfiguration setAutoWiring( final boolean autoWiring )
{
this.autoWiring = autoWiring;
return this;
}
public boolean getAutoWiring()
{
return autoWiring;
}
}
| sisu-inject/guice-plexus/guice-plexus-shim/src/main/java/org/codehaus/plexus/DefaultContainerConfiguration.java | /*******************************************************************************
* Copyright (c) 2010-2011 Sonatype, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
* The Apache License v2.0 is available at
* http://www.apache.org/licenses/LICENSE-2.0.html
* You may elect to redistribute this code under either of these licenses.
*******************************************************************************/
package org.codehaus.plexus;
import java.net.URL;
import java.util.Map;
import org.codehaus.plexus.classworlds.ClassWorld;
import org.codehaus.plexus.classworlds.realm.ClassRealm;
import org.sonatype.guice.bean.binders.BeanScanning;
public final class DefaultContainerConfiguration
implements ContainerConfiguration
{
// ----------------------------------------------------------------------
// Implementation fields
// ----------------------------------------------------------------------
private String configurationPath;
private URL configurationUrl;
private ClassWorld classWorld;
private ClassRealm classRealm;
private Map<Object, Object> contextData;
private String componentVisibility = PlexusConstants.REALM_VISIBILITY;
private String classPathScanning = BeanScanning.OFF.name();
private boolean autoWiring;
// ----------------------------------------------------------------------
// Public methods
// ----------------------------------------------------------------------
public ContainerConfiguration setName( final String name )
{
return this;
}
public ContainerConfiguration setContainerConfiguration( final String configurationPath )
{
this.configurationPath = configurationPath;
return this;
}
public String getContainerConfiguration()
{
return configurationPath;
}
public ContainerConfiguration setContainerConfigurationURL( final URL configurationUrl )
{
this.configurationUrl = configurationUrl;
return this;
}
public URL getContainerConfigurationURL()
{
return configurationUrl;
}
public ContainerConfiguration setClassWorld( final ClassWorld classWorld )
{
this.classWorld = classWorld;
return this;
}
public ClassWorld getClassWorld()
{
return classWorld;
}
public ContainerConfiguration setRealm( final ClassRealm classRealm )
{
this.classRealm = classRealm;
return this;
}
public ClassRealm getRealm()
{
return classRealm;
}
public ContainerConfiguration setContext( final Map<Object, Object> contextData )
{
this.contextData = contextData;
return this;
}
public Map<Object, Object> getContext()
{
return contextData;
}
public ContainerConfiguration setComponentVisibility( final String componentVisibility )
{
this.componentVisibility = componentVisibility;
return this;
}
public String getComponentVisibility()
{
return componentVisibility;
}
public ContainerConfiguration setClassPathScanning( final String classPathScanning )
{
this.classPathScanning = classPathScanning;
if ( !PlexusConstants.SCANNING_OFF.equalsIgnoreCase( classPathScanning ) )
{
autoWiring = true;
}
return this;
}
public String getClassPathScanning()
{
return classPathScanning;
}
public ContainerConfiguration setAutoWiring( final boolean autoWiring )
{
this.autoWiring = autoWiring;
return this;
}
public boolean getAutoWiring()
{
return autoWiring;
}
}
| Fix constant
| sisu-inject/guice-plexus/guice-plexus-shim/src/main/java/org/codehaus/plexus/DefaultContainerConfiguration.java | Fix constant |
|
Java | epl-1.0 | 2e28bb955b0134b881c910e33ee11c7c9a76dbd4 | 0 | vitruv-tools/Vitruv | /*******************************************************************************
* Copyright (c) 2014 Felix Kutzner.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Felix Kutzner - initial implementation.
******************************************************************************/
package tools.vitruv.domains.emf.monitorededitor.monitor;
import java.util.List;
import org.apache.log4j.Logger;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.emf.ecore.change.ChangeDescription;
import org.eclipse.emf.ecore.change.util.ChangeRecorder;
import org.eclipse.emf.ecore.resource.Resource;
import tools.vitruv.domains.emf.monitorededitor.IEditorPartAdapterFactory.IEditorPartAdapter;
import tools.vitruv.domains.emf.monitorededitor.tools.ISaveEventListener;
import tools.vitruv.domains.emf.monitorededitor.tools.ResourceReloadListener;
import tools.vitruv.domains.emf.monitorededitor.tools.SaveEventListenerMgr;
import tools.vitruv.framework.change.description.TransactionalChange;
import tools.vitruv.framework.change.recording.AtomicEmfChangeRecorder;
import tools.vitruv.framework.uuid.UuidGeneratorAndResolver;
import tools.vitruv.framework.uuid.UuidGeneratorAndResolverImpl;
import tools.vitruv.framework.vsum.VirtualModel;
/**
* <p>
* A listener for EMF/GMF editors recording the changes in the editor's EMF model, calling the
* method <code>onSavedResource()</code> whenever the user saves the edited file.
* </p>
*
* <p>
* The listener is initially inactive and can be activated by calling <code>initialize()</code>. It
* remains active until <code>dispose()</code> is called.
* </p>
*
* <p>
* This listener class can be used by extending it and implementing the
* <code>onSavedResource()</code> method.
* </p>
*/
public abstract class EMFModelChangeRecordingEditorSaveListener {
/** The logger for {@link EMFModelChangeRecordingEditorSaveListener} instances. */
private static final Logger LOGGER = Logger.getLogger(EMFModelChangeRecordingEditorSaveListener.class);
/**
* <code>true</code> iff the listener has been initialized and is operational.
*/
private boolean isInitialized = false;
/**
* The {@link ChangeRecorder} used to record changes to the edited model.
*/
private AtomicEmfChangeRecorder changeRecorder;
/** The monitored EMF model resource. */
private final Resource targetResource;
/** The listener getting fired when the user saves the edited file. */
private final SaveEventListenerMgr saveActionListenerManager;
private VirtualModel virtualModel;
/**
* A constructor for {@link EMFModelChangeRecordingEditorSaveListener} instances. The listener
* remains inactive until <code>initialize()</code> is called.
*
* @param editorAdapter
* An {@link IEditorPartAdapter} instance adapting the EMF/GMF editor which needs to be
* monitored.
*/
public EMFModelChangeRecordingEditorSaveListener(IEditorPartAdapter editorAdapter) {
this.targetResource = editorAdapter.getEditedModelResource();
this.saveActionListenerManager = new SaveEventListenerMgr();
saveActionListenerManager.restrictSaveActionsToEditorPart(editorAdapter.getEditorPart());
LOGGER.trace("Constructed a listener for an editor of type " + editorAdapter.getClass().getCanonicalName()
+ " for Res. " + targetResource);
}
/**
* @return <code>true</code> iff the listener has been initialized.
*/
public boolean isInitialized() {
return isInitialized;
}
/**
* Creates a {@link ISaveEventListener} instance reading out the <code>changeRecorder</code> and
* calling <code>onSavedResource()</code> after the user has saved the edited file.
*
* @return The newly created {@link SaveActionExecutionListener}.
*/
protected ISaveEventListener createSaveActionExecutionListener() {
return new ISaveEventListener() {
@Override
public void onPreSave() {
}
@Override
public void onPostSave() {
List<TransactionalChange> changes = readOutChangesAndEndRecording();
LOGGER.trace("Detected a user save action, got change descriptions: " + changes);
onSavedResource(changes);
resetChangeRecorder();
}
@Override
public void postExecuteFailure(String commandId, ExecutionException exception) {
}
};
}
private void installResourceReloadListener() {
ResourceReloadListener rrl = new ResourceReloadListener(targetResource) {
@Override
protected void onResourceUnloaded() {
LOGGER.trace("Detected a resource unload event, deactivating the change recorder.");
deactivateChangeRecorder();
}
@Override
protected void onResourceLoaded() {
LOGGER.trace("Detected a resource load event, resetting the change recorder.");
resetChangeRecorder();
}
};
targetResource.eAdapters().add(rrl);
}
private void deactivateChangeRecorder() {
if (changeRecorder != null) {
changeRecorder.dispose();
}
changeRecorder = null;
}
/**
* Resets the change recorder by replacing it with a new one.
*/
protected void resetChangeRecorder() {
deactivateChangeRecorder();
UuidGeneratorAndResolver globalUuidGeneratorAndResolver = virtualModel != null
? virtualModel.getUuidGeneratorAndResolver()
: null;
// TODO Set strict mode to false
UuidGeneratorAndResolver localUuidResolver = new UuidGeneratorAndResolverImpl(globalUuidGeneratorAndResolver,
targetResource.getResourceSet(), false);
changeRecorder = new AtomicEmfChangeRecorder(localUuidResolver);
changeRecorder.addToRecording(targetResource);
changeRecorder.beginRecording();
}
/**
* @return The changes recorded since last resetting the change recorder.
*/
protected List<TransactionalChange> readOutChangesAndEndRecording() {
changeRecorder.endRecording();
return changeRecorder.getChanges();
}
/**
* Initializes the listener. After calling this method, the listener is active until
* <code>dispose()</code> is called.
*/
public void initialize(VirtualModel virtualModel) {
this.virtualModel = virtualModel;
if (!isInitialized) {
resetChangeRecorder();
installResourceReloadListener();
saveActionListenerManager.install(createSaveActionExecutionListener());
isInitialized = true;
} else {
LOGGER.warn("Called initialize() for an initialized instance," + " ignoring the call");
}
}
/**
* Disposes the listener. After calling this method, the listener is inactive until
* <code>initialize()</code> is called.
*/
public void dispose() {
if (isInitialized) {
saveActionListenerManager.dispose();
if (changeRecorder != null) {
changeRecorder.dispose();
}
isInitialized = false;
} else {
LOGGER.warn("Called dispose() for an uninitialized instance," + " ignoring the call");
}
}
/**
* @return The monitored EMF resource.
*/
public Resource getMonitoredResource() {
return targetResource;
}
/**
* The "listener" method getting called when the user saves the edited file.
*
* @param changeDescription
* The EMF {@link ChangeDescription} describing the changes to the EMF model since last
* saving it (rsp. since opening it, in case it has not been saved yet). This object is
* provided "as is" from a {@link ChangeRecorder} instance.
*/
protected abstract void onSavedResource(List<TransactionalChange> changeDescription);
}
| bundles/extensions/emfdomain/tools.vitruv.extensions.emf.monitorededitor/src/tools/vitruv/domains/emf/monitorededitor/monitor/EMFModelChangeRecordingEditorSaveListener.java | /*******************************************************************************
* Copyright (c) 2014 Felix Kutzner.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Felix Kutzner - initial implementation.
******************************************************************************/
package tools.vitruv.domains.emf.monitorededitor.monitor;
import java.util.List;
import org.apache.log4j.Logger;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.emf.ecore.change.ChangeDescription;
import org.eclipse.emf.ecore.change.util.ChangeRecorder;
import org.eclipse.emf.ecore.resource.Resource;
import tools.vitruv.domains.emf.monitorededitor.IEditorPartAdapterFactory.IEditorPartAdapter;
import tools.vitruv.domains.emf.monitorededitor.tools.ISaveEventListener;
import tools.vitruv.domains.emf.monitorededitor.tools.ResourceReloadListener;
import tools.vitruv.domains.emf.monitorededitor.tools.SaveEventListenerMgr;
import tools.vitruv.framework.change.description.TransactionalChange;
import tools.vitruv.framework.change.recording.AtomicEmfChangeRecorder;
import tools.vitruv.framework.uuid.UuidGeneratorAndResolver;
import tools.vitruv.framework.uuid.UuidGeneratorAndResolverImpl;
import tools.vitruv.framework.vsum.VirtualModel;
/**
* <p>
* A listener for EMF/GMF editors recording the changes in the editor's EMF model, calling the
* method <code>onSavedResource()</code> whenever the user saves the edited file.
* </p>
*
* <p>
* The listener is initially inactive and can be activated by calling <code>initialize()</code>. It
* remains active until <code>dispose()</code> is called.
* </p>
*
* <p>
* This listener class can be used by extending it and implementing the
* <code>onSavedResource()</code> method.
* </p>
*/
public abstract class EMFModelChangeRecordingEditorSaveListener {
/** The logger for {@link EMFModelChangeRecordingEditorSaveListener} instances. */
private static final Logger LOGGER = Logger.getLogger(EMFModelChangeRecordingEditorSaveListener.class);
/**
* <code>true</code> iff the listener has been initialized and is operational.
*/
private boolean isInitialized = false;
/**
* The {@link ChangeRecorder} used to record changes to the edited model.
*/
private AtomicEmfChangeRecorder changeRecorder;
/** The monitored EMF model resource. */
private final Resource targetResource;
/** The listener getting fired when the user saves the edited file. */
private final SaveEventListenerMgr saveActionListenerManager;
private VirtualModel virtualModel;
/**
* A constructor for {@link EMFModelChangeRecordingEditorSaveListener} instances. The listener
* remains inactive until <code>initialize()</code> is called.
*
* @param editorAdapter
* An {@link IEditorPartAdapter} instance adapting the EMF/GMF editor which needs to be
* monitored.
*/
public EMFModelChangeRecordingEditorSaveListener(IEditorPartAdapter editorAdapter) {
this.targetResource = editorAdapter.getEditedModelResource();
this.saveActionListenerManager = new SaveEventListenerMgr();
saveActionListenerManager.restrictSaveActionsToEditorPart(editorAdapter.getEditorPart());
LOGGER.trace("Constructed a listener for an editor of type " + editorAdapter.getClass().getCanonicalName()
+ " for Res. " + targetResource);
}
/**
* @return <code>true</code> iff the listener has been initialized.
*/
public boolean isInitialized() {
return isInitialized;
}
/**
* Creates a {@link ISaveEventListener} instance reading out the <code>changeRecorder</code> and
* calling <code>onSavedResource()</code> after the user has saved the edited file.
*
* @return The newly created {@link SaveActionExecutionListener}.
*/
protected ISaveEventListener createSaveActionExecutionListener() {
return new ISaveEventListener() {
@Override
public void onPreSave() {
}
@Override
public void onPostSave() {
List<TransactionalChange> changes = readOutChangesAndEndRecording();
LOGGER.trace("Detected a user save action, got change descriptions: " + changes);
onSavedResource(changes);
resetChangeRecorder();
}
@Override
public void postExecuteFailure(String commandId, ExecutionException exception) {
}
};
}
private void installResourceReloadListener() {
ResourceReloadListener rrl = new ResourceReloadListener(targetResource) {
@Override
protected void onResourceUnloaded() {
LOGGER.trace("Detected a resource unload event, deactivating the change recorder.");
deactivateChangeRecorder();
}
@Override
protected void onResourceLoaded() {
LOGGER.trace("Detected a resource load event, resetting the change recorder.");
resetChangeRecorder();
}
};
targetResource.eAdapters().add(rrl);
}
private void deactivateChangeRecorder() {
if (changeRecorder != null) {
changeRecorder.dispose();
}
changeRecorder = null;
}
/**
* Resets the change recorder by replacing it with a new one.
*/
protected void resetChangeRecorder() {
deactivateChangeRecorder();
UuidGeneratorAndResolver globalUuidGeneratorAndResolver = virtualModel != null
? virtualModel.getUuidGeneratorAndResolver()
: null;
UuidGeneratorAndResolver localUuidResolver = new UuidGeneratorAndResolverImpl(globalUuidGeneratorAndResolver,
targetResource.getResourceSet(), true);
changeRecorder = new AtomicEmfChangeRecorder(localUuidResolver);
changeRecorder.addToRecording(targetResource);
changeRecorder.beginRecording();
}
/**
* @return The changes recorded since last resetting the change recorder.
*/
protected List<TransactionalChange> readOutChangesAndEndRecording() {
changeRecorder.endRecording();
return changeRecorder.getChanges();
}
/**
* Initializes the listener. After calling this method, the listener is active until
* <code>dispose()</code> is called.
*/
public void initialize(VirtualModel virtualModel) {
this.virtualModel = virtualModel;
if (!isInitialized) {
resetChangeRecorder();
installResourceReloadListener();
saveActionListenerManager.install(createSaveActionExecutionListener());
isInitialized = true;
} else {
LOGGER.warn("Called initialize() for an initialized instance," + " ignoring the call");
}
}
/**
* Disposes the listener. After calling this method, the listener is inactive until
* <code>initialize()</code> is called.
*/
public void dispose() {
if (isInitialized) {
saveActionListenerManager.dispose();
if (changeRecorder != null) {
changeRecorder.dispose();
}
isInitialized = false;
} else {
LOGGER.warn("Called dispose() for an uninitialized instance," + " ignoring the call");
}
}
/**
* @return The monitored EMF resource.
*/
public Resource getMonitoredResource() {
return targetResource;
}
/**
* The "listener" method getting called when the user saves the edited file.
*
* @param changeDescription
* The EMF {@link ChangeDescription} describing the changes to the EMF model since last
* saving it (rsp. since opening it, in case it has not been saved yet). This object is
* provided "as is" from a {@link ChangeRecorder} instance.
*/
protected abstract void onSavedResource(List<TransactionalChange> changeDescription);
}
| Deactivate strict mode in EMF editor monitor | bundles/extensions/emfdomain/tools.vitruv.extensions.emf.monitorededitor/src/tools/vitruv/domains/emf/monitorededitor/monitor/EMFModelChangeRecordingEditorSaveListener.java | Deactivate strict mode in EMF editor monitor |
|
Java | agpl-3.0 | d62a9184e037180d1507dbfaddf1696b76070a35 | 0 | vbelakov/rstudio,thklaus/rstudio,pssguy/rstudio,john-r-mcpherson/rstudio,jar1karp/rstudio,thklaus/rstudio,jrnold/rstudio,piersharding/rstudio,sfloresm/rstudio,brsimioni/rstudio,tbarrongh/rstudio,jrnold/rstudio,john-r-mcpherson/rstudio,edrogers/rstudio,jar1karp/rstudio,suribes/rstudio,githubfun/rstudio,more1/rstudio,jzhu8803/rstudio,suribes/rstudio,jzhu8803/rstudio,githubfun/rstudio,piersharding/rstudio,tbarrongh/rstudio,brsimioni/rstudio,sfloresm/rstudio,brsimioni/rstudio,githubfun/rstudio,suribes/rstudio,piersharding/rstudio,jzhu8803/rstudio,piersharding/rstudio,JanMarvin/rstudio,jrnold/rstudio,vbelakov/rstudio,vbelakov/rstudio,thklaus/rstudio,vbelakov/rstudio,brsimioni/rstudio,brsimioni/rstudio,JanMarvin/rstudio,pssguy/rstudio,jar1karp/rstudio,tbarrongh/rstudio,githubfun/rstudio,more1/rstudio,jrnold/rstudio,piersharding/rstudio,suribes/rstudio,john-r-mcpherson/rstudio,edrogers/rstudio,pssguy/rstudio,JanMarvin/rstudio,jar1karp/rstudio,jzhu8803/rstudio,jrnold/rstudio,thklaus/rstudio,jrnold/rstudio,pssguy/rstudio,suribes/rstudio,jar1karp/rstudio,thklaus/rstudio,john-r-mcpherson/rstudio,sfloresm/rstudio,tbarrongh/rstudio,jrnold/rstudio,jzhu8803/rstudio,thklaus/rstudio,piersharding/rstudio,vbelakov/rstudio,jar1karp/rstudio,JanMarvin/rstudio,suribes/rstudio,sfloresm/rstudio,tbarrongh/rstudio,more1/rstudio,edrogers/rstudio,brsimioni/rstudio,tbarrongh/rstudio,jrnold/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,JanMarvin/rstudio,more1/rstudio,githubfun/rstudio,jar1karp/rstudio,jzhu8803/rstudio,pssguy/rstudio,pssguy/rstudio,more1/rstudio,sfloresm/rstudio,sfloresm/rstudio,jrnold/rstudio,thklaus/rstudio,suribes/rstudio,suribes/rstudio,john-r-mcpherson/rstudio,sfloresm/rstudio,pssguy/rstudio,tbarrongh/rstudio,tbarrongh/rstudio,jar1karp/rstudio,JanMarvin/rstudio,brsimioni/rstudio,sfloresm/rstudio,jzhu8803/rstudio,john-r-mcpherson/rstudio,jar1karp/rstudio,more1/rstudio,pssguy/rstudio,jzhu8803/rstudio,more1/rstudio,t
hklaus/rstudio,vbelakov/rstudio,edrogers/rstudio,JanMarvin/rstudio,edrogers/rstudio,githubfun/rstudio,edrogers/rstudio,more1/rstudio,githubfun/rstudio,piersharding/rstudio,brsimioni/rstudio,edrogers/rstudio,john-r-mcpherson/rstudio,piersharding/rstudio,edrogers/rstudio,piersharding/rstudio,githubfun/rstudio,john-r-mcpherson/rstudio,vbelakov/rstudio,vbelakov/rstudio | /*
* SnippetHelper.java
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.snippets;
import com.google.gwt.core.client.JavaScriptException;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.JsArrayString;
import org.rstudio.core.client.JsArrayUtil;
import org.rstudio.studio.client.common.FilePathUtils;
import org.rstudio.studio.client.workbench.snippets.model.Snippet;
import org.rstudio.studio.client.workbench.snippets.model.SnippetData;
import org.rstudio.studio.client.workbench.snippets.model.SnippetsChangedEvent;
import org.rstudio.studio.client.workbench.views.source.editors.text.AceEditor;
import org.rstudio.studio.client.workbench.views.source.editors.text.ace.AceEditorNative;
import java.util.ArrayList;
/**
 * Helper for working with Ace editor snippets.  Responsible for loading the
 * stock Ace snippet files plus RStudio's custom R and C++ snippets,
 * enumerating the snippets available for the current editor mode, and
 * expanding a chosen snippet into the editor (including expansion of the
 * `Filename` / `HeaderGuardFileName` macros based on the document path).
 */
public class SnippetHelper
{
   // Opaque handle to Ace's JavaScript snippetManager singleton.
   static class SnippetManager extends JavaScriptObject
   {
      protected SnippetManager() {}
   }

   public SnippetHelper(AceEditor editor)
   {
      this(editor, null);
   }

   /**
    * @param editor editor that snippets will be applied to
    * @param path path of the document being edited, used to expand file-name
    *    based snippet macros; may be null
    */
   public SnippetHelper(AceEditor editor, String path)
   {
      editor_ = editor;
      native_ = editor.getWidget().getEditor();
      manager_ = getSnippetManager();
      path_ = path;
   }

   private static final native SnippetManager getSnippetManager() /*-{
      return $wnd.require("ace/snippets").snippetManager;
   }-*/;

   /**
    * @return names of all snippets registered for the C++ mode
    */
   public ArrayList<String> getCppSnippets()
   {
      ensureSnippetsLoaded();
      ensureCustomCppSnippetsLoaded();
      return JsArrayUtil.fromJsArrayString(
            getAvailableSnippetsImpl(manager_, "c_cpp"));
   }

   /**
    * @param name snippet name as shown in the completion list
    * @return the named C++ snippet, or null if not registered
    */
   public Snippet getCppSnippet(String name)
   {
      return getSnippet(manager_, "c_cpp", name);
   }

   // Register RStudio's custom C++ snippets at most once per session.
   private void ensureCustomCppSnippetsLoaded()
   {
      if (!customCppSnippetsLoaded_)
      {
         addCustomCppSnippets(manager_);
         customCppSnippetsLoaded_ = true;
      }
   }

   private final native void addCustomCppSnippets(SnippetManager manager)
   /*-{
      var snippetText = [
         "## Header guard",
         "snippet once",
         "\t#ifndef ${1:`HeaderGuardFileName`}",
         "\t#define ${1:`HeaderGuardFileName`}",
         "",
         "\t${0}",
         "",
         "\t#endif // ${1:`HeaderGuardFileName`}",
         "##",
         "## Anonymous namespace",
         "snippet ans",
         "\tnamespace {",
         "\t${0}",
         "\t} // anonymous namespace",
         "##",
         "## Named namespace",
         "snippet ns",
         "\tnamespace ${1} {",
         "\t${0}",
         "\t} // namespace ${1}",
         "##",
         "## class",
         "snippet cls",
         "\tclass ${1} {",
         "\tpublic:",
         "\t\t${2}",
         "\tprivate:",
         "\t\t${3}",
         "\t};",
         "##",
         "## struct",
         "snippet str",
         "\tstruct ${1} {",
         "\t\t${0}",
         "\t};",
         "##",
         "## cerr",
         "snippet cerr",
         "\tstd::cerr << ${1} << std::endl;${0}",
         "##",
         "snippet main",
         "\tint main(int argc, char* argv[]) {",
         "\t\t${0}",
         "\t}",
      ].join("\n");
      var parsed = manager.parseSnippetFile(snippetText);
      manager.register(parsed, "c_cpp");
   }-*/;

   // Register RStudio's custom R snippets at most once per session.
   private void ensureCustomRSnippetsLoaded()
   {
      if (!customRSnippetsLoaded_)
      {
         loadCustomRSnippets(manager_);
         customRSnippetsLoaded_ = true;
      }
   }

   private final native void loadCustomRSnippets(SnippetManager manager)
   /*-{
      var snippetText = [
         "snippet sserver",
         "\tshinyServer(function(input, output, session) {",
         "\t\t${0}",
         "\t})",
         "snippet dig",
         "\tdevtools::install_github(\"${0}\")",
         "## S4",
         "snippet setG",
         "\tsetGeneric(\"${1:generic}\", function(${2:x, ...}) {",
         "\t\tstandardGeneric(\"${1:generic}\")",
         "\t})",
         "snippet setM",
         "\tsetMethod(\"${1:generic}\", ${2:\"class\"}, function(${3:object}, ...) {",
         "\t\t${0}",
         "\t})",
         "snippet setC",
         "\tsetClass(\"${1:Class}\", slots = c(${2:name = \"type\"}))"
      ].join("\n");
      var parsed = manager.parseSnippetFile(snippetText);
      manager.register(parsed, "r");
   }-*/;

   /**
    * @return names of all snippets available for the mode at the current
    *    cursor position
    */
   public ArrayList<String> getAvailableSnippets()
   {
      ensureSnippetsLoaded();
      return JsArrayUtil.fromJsArrayString(
            getAvailableSnippetsImpl(manager_, getEditorMode()));
   }

   private final void ensureSnippetsLoaded()
   {
      ensureRSnippetsLoaded();
      ensureCppSnippetsLoaded();
   }

   private void ensureRSnippetsLoaded()
   {
      ensureAceSnippetsLoaded("r", manager_);
      ensureCustomRSnippetsLoaded();
   }

   private void ensureCppSnippetsLoaded()
   {
      ensureAceSnippetsLoaded("c_cpp", manager_);
      ensureCustomCppSnippetsLoaded();
   }

   // Parse a snippet file and apply the parsed snippets for
   // mode 'mode'. Returns an associated exception on failure,
   // or 'null' on success.
   public static final native JavaScriptException loadSnippetsForMode(
         String mode,
         String snippetText,
         SnippetManager manager)
   /*-{

      // Parse snippets passed through
      var snippets = null;
      try {
         snippets = manager.parseSnippetFile(snippetText)
      } catch (e) {
         return e;
      }

      // Clear old snippets associated with this mode
      delete manager.snippetMap[mode];
      delete manager.snippetNameMap[mode];

      // Overwrite the old snippets stored
      var old = $wnd.require("ace/snippets/" + mode);
      if (old != null) {
         old.$snippetText = old.snippetText;
         old.snippetText = snippetText;
      }

      // Apply new snippets
      manager.register(snippets, mode);
      return null;

   }-*/;

   /**
    * Convenience overload of {@link #loadSnippetsForMode(String, String,
    * SnippetManager)} using the shared snippet manager.
    */
   public static final JavaScriptException loadSnippetsForMode(
         String mode,
         String snippetText)
   {
      return loadSnippetsForMode(
            mode,
            snippetText,
            getSnippetManager());
   }

   private static final native void ensureAceSnippetsLoaded(
         String mode,
         SnippetManager manager) /*-{
      var snippetsForMode = manager.snippetNameMap[mode];
      if (!snippetsForMode) {
         var id = "ace/snippets/" + mode;
         var m = $wnd.require(id);
         if (!m) {
            console.log("Failed load Ace snippets for mode '" + mode + "'");
            return;
         }

         if (!manager.files)
            manager.files = {};

         manager.files[id] = m;
         if (!m.snippets && m.snippetText)
            m.snippets = manager.parseSnippetFile(m.snippetText);

         manager.register(m.snippets || [], m.scope);
      }
   }-*/;

   /**
    * Replace 'token' (the text to the left of the cursor) with the expansion
    * of the named snippet.
    *
    * @param token text that triggered the snippet, removed before insertion
    * @param snippetName name of the snippet to expand
    */
   public void applySnippet(String token, String snippetName)
   {
      editor_.expandSelectionLeft(token.length());
      String snippetContent = transformMacros(
            getSnippetContents(snippetName));
      applySnippetImpl(snippetContent, manager_, editor_.getWidget().getEditor());
   }

   // Expand the `Filename` macro with the document name (sans extension).
   private String replaceFilename(String snippet)
   {
      String fileName = FilePathUtils.fileNameSansExtension(path_);
      return snippet.replaceAll("`Filename.*`", fileName);
   }

   // Generate a header guard symbol from the document path, e.g.
   // 'pkg/inst/include/pkg/foo.hpp' -> 'PKG_FOO_HPP'.
   private String replaceHeaderGuard(String snippet)
   {
      // Munge the path a bit
      String path = path_;
      if (path.startsWith("~/"))
         path = path.substring(2);

      // Strip everything up to and including the package marker.  Use the
      // marker length explicitly: the previous hard-coded offsets (+15, +6)
      // were off by one and dropped the first character of the remaining
      // path, producing a wrong guard symbol.
      int instIncludeIdx = path.indexOf(INST_INCLUDE_MARKER);
      if (instIncludeIdx != -1)
         path = path.substring(instIncludeIdx + INST_INCLUDE_MARKER.length());

      int srcIdx = path.indexOf(SRC_MARKER);
      if (srcIdx != -1)
         path = path.substring(srcIdx + SRC_MARKER.length());

      path = path.replaceAll("[./]", "_");
      path = path.toUpperCase();
      return snippet.replaceAll("`HeaderGuardFileName`", path);
   }

   // Expand path-dependent macros; no-op when no document path is known.
   private String transformMacros(String snippet)
   {
      if (path_ != null)
      {
         snippet = replaceFilename(snippet);
         snippet = replaceHeaderGuard(snippet);
      }
      return snippet;
   }

   public final native void applySnippetImpl(
         String snippetContent,
         SnippetManager manager,
         AceEditorNative editor) /*-{
      manager.insertSnippet(editor, snippetContent);
   }-*/;

   private static final native JsArrayString getAvailableSnippetsImpl(
         SnippetManager manager,
         String mode) /*-{
      var snippetsForMode = manager.snippetNameMap[mode];
      if (snippetsForMode)
         return Object.keys(snippetsForMode);
      return [];
   }-*/;

   private static final native Snippet getSnippet(
         SnippetManager manager,
         String mode,
         String name) /*-{
      var snippetsForMode = manager.snippetNameMap[mode];
      if (snippetsForMode)
         return snippetsForMode[name];
      else
         return null;
   }-*/;

   // NOTE: this function assumes you've already called ensureSnippetsLoaded
   // (this is a safe assumption because in order to enumerate snippet names
   // you need to call the ensure* functions)
   public String getSnippetContents(String snippetName)
   {
      return getSnippet(manager_, getEditorMode(), snippetName).getContent();
   }

   // Mode at the cursor position; defaults to "r" when unknown.
   private String getEditorMode()
   {
      String mode = editor_.getLanguageMode(
            editor_.getCursorPosition());
      if (mode == null)
         mode = "r";
      return mode.toLowerCase();
   }

   /**
    * Re-register snippets for every mode included in a snippets-changed
    * event payload.
    */
   public static void onSnippetsChanged(SnippetsChangedEvent event)
   {
      SnippetManager manager = getSnippetManager();
      JsArray<SnippetData> data = event.getData();
      for (int i = 0; i < data.length(); i++)
      {
         SnippetData snippetData = data.get(i);
         loadSnippetsForMode(
               snippetData.getMode(),
               snippetData.getContents(),
               manager);
      }
   }

   // Path markers used when deriving header guard names.
   private static final String INST_INCLUDE_MARKER = "/inst/include/";
   private static final String SRC_MARKER = "/src/";

   private final AceEditor editor_;
   // Currently unused beyond construction; retained for parity with callers
   // that may access it via JSNI -- TODO confirm before removing.
   private final AceEditorNative native_;
   private final SnippetManager manager_;
   private final String path_;

   private static boolean customCppSnippetsLoaded_;
   private static boolean customRSnippetsLoaded_;
}
| src/gwt/src/org/rstudio/studio/client/workbench/snippets/SnippetHelper.java | /*
* SnippetHelper.java
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
package org.rstudio.studio.client.workbench.snippets;
import com.google.gwt.core.client.JavaScriptException;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.JsArrayString;
import org.rstudio.core.client.JsArrayUtil;
import org.rstudio.studio.client.common.FilePathUtils;
import org.rstudio.studio.client.workbench.snippets.model.Snippet;
import org.rstudio.studio.client.workbench.snippets.model.SnippetData;
import org.rstudio.studio.client.workbench.snippets.model.SnippetsChangedEvent;
import org.rstudio.studio.client.workbench.views.source.editors.text.AceEditor;
import org.rstudio.studio.client.workbench.views.source.editors.text.ace.AceEditorNative;
import java.util.ArrayList;
/**
 * Helper for working with Ace editor snippets.  Responsible for loading the
 * stock Ace snippet files plus RStudio's custom R and C++ snippets,
 * enumerating the snippets available for the current editor mode, and
 * expanding a chosen snippet into the editor (including expansion of the
 * `Filename` / `HeaderGuardFileName` macros based on the document path).
 */
public class SnippetHelper
{
   // Opaque handle to Ace's JavaScript snippetManager singleton.
   static class SnippetManager extends JavaScriptObject
   {
      protected SnippetManager() {}
   }

   public SnippetHelper(AceEditor editor)
   {
      this(editor, null);
   }

   /**
    * @param editor editor that snippets will be applied to
    * @param path path of the document being edited, used to expand file-name
    *    based snippet macros; may be null
    */
   public SnippetHelper(AceEditor editor, String path)
   {
      editor_ = editor;
      native_ = editor.getWidget().getEditor();
      manager_ = getSnippetManager();
      path_ = path;
   }

   private static final native SnippetManager getSnippetManager() /*-{
      return $wnd.require("ace/snippets").snippetManager;
   }-*/;

   /**
    * @return names of all snippets registered for the C++ mode
    */
   public ArrayList<String> getCppSnippets()
   {
      ensureSnippetsLoaded();
      ensureCustomCppSnippetsLoaded();
      return JsArrayUtil.fromJsArrayString(
            getAvailableSnippetsImpl(manager_, "c_cpp"));
   }

   /**
    * @param name snippet name as shown in the completion list
    * @return the named C++ snippet, or null if not registered
    */
   public Snippet getCppSnippet(String name)
   {
      return getSnippet(manager_, "c_cpp", name);
   }

   // Register RStudio's custom C++ snippets at most once per session.
   private void ensureCustomCppSnippetsLoaded()
   {
      if (!customCppSnippetsLoaded_)
      {
         addCustomCppSnippets(manager_);
         customCppSnippetsLoaded_ = true;
      }
   }

   private final native void addCustomCppSnippets(SnippetManager manager)
   /*-{
      var snippetText = [
         "## Header guard",
         "snippet once",
         "\t#ifndef ${1:`HeaderGuardFileName`}",
         "\t#define ${1:`HeaderGuardFileName`}",
         "",
         "\t${0}",
         "",
         "\t#endif // ${1:`HeaderGuardFileName`}",
         "##",
         "## Anonymous namespace",
         "snippet ans",
         "\tnamespace {",
         "\t${0}",
         "\t} // anonymous namespace",
         "##",
         "## Named namespace",
         "snippet ns",
         "\tnamespace ${1} {",
         "\t${0}",
         "\t} // namespace ${1}",
         "##",
         "## class",
         "snippet cls",
         "\tclass ${1} {",
         "\tpublic:",
         "\t\t${2}",
         "\tprivate:",
         "\t\t${3}",
         "\t};",
         "##",
         "## struct",
         "snippet str",
         "\tstruct ${1} {",
         "\t\t${0}",
         "\t};",
         "##",
         "## cerr",
         "snippet cerr",
         "\tstd::cerr << ${1} << std::endl;${0}",
         "##",
         "snippet main",
         "\tint main(int argc, char* argv[]) {",
         "\t\t${0}",
         "\t}",
      ].join("\n");
      var parsed = manager.parseSnippetFile(snippetText);
      manager.register(parsed, "c_cpp");
   }-*/;

   // Register RStudio's custom R snippets at most once per session.
   private void ensureCustomRSnippetsLoaded()
   {
      if (!customRSnippetsLoaded_)
      {
         loadCustomRSnippets(manager_);
         customRSnippetsLoaded_ = true;
      }
   }

   private final native void loadCustomRSnippets(SnippetManager manager)
   /*-{
      var snippetText = [
         "snippet sserver",
         "\tshinyServer(function(input, output, session) {",
         "\t\t${0}",
         "\t})",
         "snippet dig",
         "\tdevtools::install_github(\"${0}\")",
         "## S4",
         "snippet setG",
         "\tsetGeneric(\"${1:generic}\", function(${2:x, ...}) {",
         "\t\tstandardGeneric(\"${1:generic}\")",
         "\t})",
         "snippet setM",
         "\tsetMethod(\"${1:generic}\", ${2:\"class\"}, function(${3:object}, ...) {",
         "\t\t${0}",
         "\t})",
         "snippet setC",
         "\tsetClass(\"${1:Class}\", slots = c(${2:name = \"type\"}))"
      ].join("\n");
      var parsed = manager.parseSnippetFile(snippetText);
      manager.register(parsed, "r");
   }-*/;

   /**
    * @return names of all snippets available for the mode at the current
    *    cursor position
    */
   public ArrayList<String> getAvailableSnippets()
   {
      ensureSnippetsLoaded();
      return JsArrayUtil.fromJsArrayString(
            getAvailableSnippetsImpl(manager_, getEditorMode()));
   }

   private final void ensureSnippetsLoaded()
   {
      ensureRSnippetsLoaded();
      ensureCppSnippetsLoaded();
   }

   private void ensureRSnippetsLoaded()
   {
      ensureAceSnippetsLoaded("r", manager_);
      ensureCustomRSnippetsLoaded();
   }

   private void ensureCppSnippetsLoaded()
   {
      ensureAceSnippetsLoaded("c_cpp", manager_);
      ensureCustomCppSnippetsLoaded();
   }

   // Parse a snippet file and apply the parsed snippets for
   // mode 'mode'. Returns an associated exception on failure,
   // or 'null' on success.
   public static final native JavaScriptException loadSnippetsForMode(
         String mode,
         String snippetText,
         SnippetManager manager)
   /*-{

      // Parse snippets passed through
      var snippets = null;
      try {
         snippets = manager.parseSnippetFile(snippetText)
      } catch (e) {
         return e;
      }

      // Clear old snippets associated with this mode
      delete manager.snippetMap[mode];
      delete manager.snippetNameMap[mode];

      // Overwrite the old snippets stored
      var old = $wnd.require("ace/snippets/" + mode);
      if (old != null)
         old.snippetText = snippetText;

      // Apply new snippets
      manager.register(snippets, mode);
      return null;

   }-*/;

   /**
    * Convenience overload of {@link #loadSnippetsForMode(String, String,
    * SnippetManager)} using the shared snippet manager.
    */
   public static final JavaScriptException loadSnippetsForMode(
         String mode,
         String snippetText)
   {
      return loadSnippetsForMode(
            mode,
            snippetText,
            getSnippetManager());
   }

   private static final native void ensureAceSnippetsLoaded(
         String mode,
         SnippetManager manager) /*-{
      var snippetsForMode = manager.snippetNameMap[mode];
      if (!snippetsForMode) {
         var id = "ace/snippets/" + mode;
         var m = $wnd.require(id);
         if (!m) {
            console.log("Failed load Ace snippets for mode '" + mode + "'");
            return;
         }

         if (!manager.files)
            manager.files = {};

         manager.files[id] = m;
         if (!m.snippets && m.snippetText)
            m.snippets = manager.parseSnippetFile(m.snippetText);

         manager.register(m.snippets || [], m.scope);
      }
   }-*/;

   /**
    * Replace 'token' (the text to the left of the cursor) with the expansion
    * of the named snippet.
    *
    * @param token text that triggered the snippet, removed before insertion
    * @param snippetName name of the snippet to expand
    */
   public void applySnippet(String token, String snippetName)
   {
      editor_.expandSelectionLeft(token.length());
      String snippetContent = transformMacros(
            getSnippetContents(snippetName));
      applySnippetImpl(snippetContent, manager_, editor_.getWidget().getEditor());
   }

   // Expand the `Filename` macro with the document name (sans extension).
   private String replaceFilename(String snippet)
   {
      String fileName = FilePathUtils.fileNameSansExtension(path_);
      return snippet.replaceAll("`Filename.*`", fileName);
   }

   // Generate a header guard symbol from the document path, e.g.
   // 'pkg/inst/include/pkg/foo.hpp' -> 'PKG_FOO_HPP'.
   private String replaceHeaderGuard(String snippet)
   {
      // Munge the path a bit
      String path = path_;
      if (path.startsWith("~/"))
         path = path.substring(2);

      // Strip everything up to and including the package marker.  Use the
      // marker length explicitly: the previous hard-coded offsets (+15, +6)
      // were off by one and dropped the first character of the remaining
      // path, producing a wrong guard symbol.
      int instIncludeIdx = path.indexOf(INST_INCLUDE_MARKER);
      if (instIncludeIdx != -1)
         path = path.substring(instIncludeIdx + INST_INCLUDE_MARKER.length());

      int srcIdx = path.indexOf(SRC_MARKER);
      if (srcIdx != -1)
         path = path.substring(srcIdx + SRC_MARKER.length());

      path = path.replaceAll("[./]", "_");
      path = path.toUpperCase();
      return snippet.replaceAll("`HeaderGuardFileName`", path);
   }

   // Expand path-dependent macros; no-op when no document path is known.
   private String transformMacros(String snippet)
   {
      if (path_ != null)
      {
         snippet = replaceFilename(snippet);
         snippet = replaceHeaderGuard(snippet);
      }
      return snippet;
   }

   public final native void applySnippetImpl(
         String snippetContent,
         SnippetManager manager,
         AceEditorNative editor) /*-{
      manager.insertSnippet(editor, snippetContent);
   }-*/;

   private static final native JsArrayString getAvailableSnippetsImpl(
         SnippetManager manager,
         String mode) /*-{
      var snippetsForMode = manager.snippetNameMap[mode];
      if (snippetsForMode)
         return Object.keys(snippetsForMode);
      return [];
   }-*/;

   private static final native Snippet getSnippet(
         SnippetManager manager,
         String mode,
         String name) /*-{
      var snippetsForMode = manager.snippetNameMap[mode];
      if (snippetsForMode)
         return snippetsForMode[name];
      else
         return null;
   }-*/;

   // NOTE: this function assumes you've already called ensureSnippetsLoaded
   // (this is a safe assumption because in order to enumerate snippet names
   // you need to call the ensure* functions)
   public String getSnippetContents(String snippetName)
   {
      return getSnippet(manager_, getEditorMode(), snippetName).getContent();
   }

   // Mode at the cursor position; defaults to "r" when unknown.
   private String getEditorMode()
   {
      String mode = editor_.getLanguageMode(
            editor_.getCursorPosition());
      if (mode == null)
         mode = "r";
      return mode.toLowerCase();
   }

   /**
    * Re-register snippets for every mode included in a snippets-changed
    * event payload.
    */
   public static void onSnippetsChanged(SnippetsChangedEvent event)
   {
      SnippetManager manager = getSnippetManager();
      JsArray<SnippetData> data = event.getData();
      for (int i = 0; i < data.length(); i++)
      {
         SnippetData snippetData = data.get(i);
         loadSnippetsForMode(
               snippetData.getMode(),
               snippetData.getContents(),
               manager);
      }
   }

   // Path markers used when deriving header guard names.
   private static final String INST_INCLUDE_MARKER = "/inst/include/";
   private static final String SRC_MARKER = "/src/";

   private final AceEditor editor_;
   // Currently unused beyond construction; retained for parity with callers
   // that may access it via JSNI -- TODO confirm before removing.
   private final AceEditorNative native_;
   private final SnippetManager manager_;
   private final String path_;

   private static boolean customCppSnippetsLoaded_;
   private static boolean customRSnippetsLoaded_;
}
| cache old Ace snippets (support 'revert' later)
| src/gwt/src/org/rstudio/studio/client/workbench/snippets/SnippetHelper.java | cache old Ace snippets (support 'revert' later) |
|
Java | agpl-3.0 | 2ae07670581199244900507203eac61731451df8 | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | abcf0a82-2e60-11e5-9284-b827eb9e62be | hello.java | abc9abdc-2e60-11e5-9284-b827eb9e62be | abcf0a82-2e60-11e5-9284-b827eb9e62be | hello.java | abcf0a82-2e60-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | 3a69704e4214be329518913cc763377f67a67421 | 0 | David-Development/ownCloud-Account-Importer | package com.nextcloud.android.sso.model;
import java.io.Serializable;
/**
* Nextcloud SingleSignOn
*
* @author David Luhmer
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
 * Simple serializable holder for an exception's title and detail message,
 * suitable for passing across process boundaries (e.g. inside a Bundle).
 */
public class ExceptionMessage implements Serializable {

    // Explicit serialVersionUID keeps the serialized form stable across
    // future changes to this class (otherwise the JVM derives one from the
    // class shape, which breaks deserialization on any edit).
    private static final long serialVersionUID = 1L;

    public String title;
    public String message;

    /**
     * @param title   short human-readable title of the exception
     * @param message detailed description of the exception
     */
    public ExceptionMessage(String title, String message) {
        this.title = title;
        this.message = message;
    }
}
| src/main/java/com/nextcloud/android/sso/model/ExceptionMessage.java | package com.nextcloud.android.sso.model;
/**
* Nextcloud SingleSignOn
*
* @author David Luhmer
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
 * Simple holder for an exception's title and detail message.
 *
 * Implements Serializable (fully qualified -- this file has no import block)
 * so instances survive being placed in a Bundle; without it, saving activity
 * state throws java.io.NotSerializableException.
 */
public class ExceptionMessage implements java.io.Serializable {

    // Explicit serialVersionUID keeps the serialized form stable across
    // future changes to this class.
    private static final long serialVersionUID = 1L;

    public String title;
    public String message;

    /**
     * @param title   short human-readable title of the exception
     * @param message detailed description of the exception
     */
    public ExceptionMessage(String title, String message) {
        this.title = title;
        this.message = message;
    }
}
| ExceptionMessage should implement Serializable
To hopefully fix this strange error:
```
2020-05-07 19:52:28.526 23303-23303/it.niedermann.nextcloud.deck.dev E/AndroidRuntime: FATAL EXCEPTION: main
Process: it.niedermann.nextcloud.deck.dev, PID: 23303
java.lang.RuntimeException: Parcelable encountered IOException writing serializable object (name = com.nextcloud.android.sso.exceptions.NextcloudHttpRequestFailedException)
at android.os.Parcel.writeSerializable(Parcel.java:1833)
at android.os.Parcel.writeValue(Parcel.java:1780)
at android.os.Parcel.writeArrayMapInternal(Parcel.java:928)
at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584)
at android.os.Bundle.writeToParcel(Bundle.java:1253)
at android.os.Parcel.writeBundle(Parcel.java:997)
at androidx.fragment.app.FragmentState.writeToParcel(FragmentState.java:125)
at android.os.Parcel.writeTypedObject(Parcel.java:1634)
at android.os.Parcel.writeTypedList(Parcel.java:1513)
at android.os.Parcel.writeTypedList(Parcel.java:1470)
at androidx.fragment.app.FragmentManagerState.writeToParcel(FragmentManagerState.java:51)
at android.os.Parcel.writeParcelable(Parcel.java:1801)
at android.os.Parcel.writeValue(Parcel.java:1707)
at android.os.Parcel.writeArrayMapInternal(Parcel.java:928)
at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584)
at android.os.Bundle.writeToParcel(Bundle.java:1253)
at android.app.IActivityTaskManager$Stub$Proxy.activityStopped(IActivityTaskManager.java:4505)
at android.app.servertransaction.PendingTransactionActions$StopInfo.run(PendingTransactionActions.java:145)
at android.os.Handler.handleCallback(Handler.java:883)
at android.os.Handler.dispatchMessage(Handler.java:100)
at android.os.Looper.loop(Looper.java:214)
at android.app.ActivityThread.main(ActivityThread.java:7356)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:492)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:930)
Caused by: java.io.NotSerializableException: com.nextcloud.android.sso.model.ExceptionMessage
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1240)
at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1604)
at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1565)
at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1488)
at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1234)
at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:354)
at android.os.Parcel.writeSerializable(Parcel.java:1828)
at android.os.Parcel.writeValue(Parcel.java:1780)
at android.os.Parcel.writeArrayMapInternal(Parcel.java:928)
at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584)
at android.os.Bundle.writeToParcel(Bundle.java:1253)
at android.os.Parcel.writeBundle(Parcel.java:997)
at androidx.fragment.app.FragmentState.writeToParcel(FragmentState.java:125)
at android.os.Parcel.writeTypedObject(Parcel.java:1634)
at android.os.Parcel.writeTypedList(Parcel.java:1513)
at android.os.Parcel.writeTypedList(Parcel.java:1470)
at androidx.fragment.app.FragmentManagerState.writeToParcel(FragmentManagerState.java:51)
at android.os.Parcel.writeParcelable(Parcel.java:1801)
at android.os.Parcel.writeValue(Parcel.java:1707)
at android.os.Parcel.writeArrayMapInternal(Parcel.java:928)
at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584)
at android.os.Bundle.writeToParcel(Bundle.java:1253)
at android.app.IActivityTaskManager$Stub$Proxy.activityStopped(IActivityTaskManager.java:4505)
at android.app.servertransaction.PendingTransactionActions$StopInfo.run(PendingTransactionActions.java:145)
at android.os.Handler.handleCallback(Handler.java:883)
at android.os.Handler.dispatchMessage(Handler.java:100)
at android.os.Looper.loop(Looper.java:214)
at android.app.ActivityThread.main(ActivityThread.java:7356)
at java.lang.reflect.Method.invoke(Native Method)
at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:492)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:930)
```
Signed-off-by: desperateCoder <[email protected]>
| src/main/java/com/nextcloud/android/sso/model/ExceptionMessage.java | ExceptionMessage should implement Serializable |
|
Java | lgpl-2.1 | 899df43fc8caa7ee1a5f3784e597526605f03261 | 0 | zebrafishmine/intermine,tomck/intermine,tomck/intermine,justincc/intermine,zebrafishmine/intermine,justincc/intermine,kimrutherford/intermine,JoeCarlson/intermine,elsiklab/intermine,justincc/intermine,justincc/intermine,zebrafishmine/intermine,kimrutherford/intermine,JoeCarlson/intermine,JoeCarlson/intermine,elsiklab/intermine,joshkh/intermine,JoeCarlson/intermine,joshkh/intermine,JoeCarlson/intermine,JoeCarlson/intermine,tomck/intermine,tomck/intermine,kimrutherford/intermine,justincc/intermine,kimrutherford/intermine,elsiklab/intermine,kimrutherford/intermine,zebrafishmine/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,elsiklab/intermine,elsiklab/intermine,joshkh/intermine,elsiklab/intermine,joshkh/intermine,zebrafishmine/intermine,elsiklab/intermine,elsiklab/intermine,kimrutherford/intermine,JoeCarlson/intermine,tomck/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,justincc/intermine,joshkh/intermine,JoeCarlson/intermine,tomck/intermine,zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,kimrutherford/intermine,joshkh/intermine,elsiklab/intermine,joshkh/intermine,tomck/intermine,justincc/intermine,kimrutherford/intermine,joshkh/intermine,justincc/intermine,zebrafishmine/intermine,kimrutherford/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine | package org.intermine.web.logic.query;
/*
* Copyright (C) 2002-2011 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.intermine.api.bag.BagManager;
import org.intermine.api.bag.BagQueryConfig;
import org.intermine.api.config.ClassKeyHelper;
import org.intermine.api.profile.InterMineBag;
import org.intermine.api.profile.Profile;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.ReferenceDescriptor;
import org.intermine.objectstore.ObjectStoreSummary;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.pathquery.ConstraintValueParser;
import org.intermine.pathquery.Path;
import org.intermine.pathquery.PathConstraint;
import org.intermine.pathquery.PathConstraintAttribute;
import org.intermine.pathquery.PathConstraintBag;
import org.intermine.pathquery.PathConstraintLookup;
import org.intermine.pathquery.PathConstraintLoop;
import org.intermine.pathquery.PathConstraintMultiValue;
import org.intermine.pathquery.PathConstraintNull;
import org.intermine.pathquery.PathConstraintSubclass;
import org.intermine.pathquery.PathException;
import org.intermine.pathquery.PathQuery;
import org.intermine.template.SwitchOffAbility;
import org.intermine.util.StringUtil;
import org.intermine.web.autocompletion.AutoCompleter;
import org.intermine.web.logic.querybuilder.DisplayPath;
/**
* Representation of a PathQuery constraint for use by JSP pages. This object provides methods
* needed to populate constraint editing boxes and dropdowns, find available bag names, etc. Can
* either represent a new constraint to be added with no values set or an existing constraint that
* is being edited.
*
* Get methods return null if no values are available
*
* @author Richard Smith
*/
public class DisplayConstraint
{
private Path path;
private List<DisplayConstraintOption> validOps;
private AutoCompleter ac;
private ObjectStoreSummary oss;
private String endCls;
private String fieldName;
private BagQueryConfig bagQueryConfig;
private Map<String, List<FieldDescriptor>> classKeys;
private BagManager bagManager;
private Profile profile;
private String constraintLabel;
private List<DisplayConstraintOption> fixedOps;
private PathConstraint con;
private PathQuery query;
private String code;
private boolean editableInTemplate;
private SwitchOffAbility switchOffAbility;
private boolean isBagSelected;
private String selectedBagValue;
private ConstraintOp selectedBagOp;
private List<Object> templateSummary;
private boolean showExtraConstraint = false;
    /**
     * Construct for a new constraint that is being added to a query.
     * @param path The path that is being constrained
     * @param profile user editing the query, used to fetch available bags
     * @param query the PathQuery, in order to provide information on candidate loops
     * @param ac auto completer
     * @param oss summary data for the ObjectStore contents
     * @param bagQueryConfig additional details needed for LOOKUP constraints
     * @param classKeys identifier field config, needed for LOOKUP constraints
     * @param bagManager provides access to saved bags
     */
    protected DisplayConstraint(Path path, Profile profile, PathQuery query, AutoCompleter ac,
            ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
            Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
        // Delegates all state setup to init(), shared with the editing constructor.
        init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
    }
    /**
     * Construct for an existing constraint that is being edited.
     * @param path The path that is being constrained
     * @param con the constraint being edited
     * @param label text associated with this constraint, if a template query
     * @param code the code of this constraint in the query
     * @param editableInTemplate true if this is a template query and this constraint is editable
     * @param switchOffAbility whether the constraint is on, off or locked
     * @param profile user editing the query, used to fetch available bags
     * @param query the PathQuery, in order to provide information on candidate loops
     * @param ac auto completer
     * @param oss summary data for the ObjectStore contents
     * @param bagQueryConfig additional details needed for LOOKUP constraints
     * @param classKeys identifier field config, needed for LOOKUP constraints
     * @param bagManager provides access to saved bags
     * @param templateSummary summary values for this constraint's path, if a template query
     */
    protected DisplayConstraint(Path path, PathConstraint con, String label, String code,
        boolean editableInTemplate, SwitchOffAbility switchOffAbility, Profile profile,
        PathQuery query, AutoCompleter ac,
        ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
        Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager,
        List<Object> templateSummary) {
        // Shared setup first, then the editing-specific state.
        init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
        this.con = con;
        this.constraintLabel = label;
        this.code = code;
        this.editableInTemplate = editableInTemplate;
        this.switchOffAbility = switchOffAbility;
        this.templateSummary = templateSummary;
    }
    // Shared initialisation for both the 'new constraint' and 'edit existing
    // constraint' constructors: caches the collaborators and derives the end
    // class / field name from the constrained path.
    private void init(Path path, Profile profile, PathQuery query, AutoCompleter ac,
            ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
            Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
        this.path = path;
        this.ac = ac;
        this.oss = oss;
        this.endCls = getEndClass(path);
        this.fieldName = getFieldName(path);
        this.bagQueryConfig = bagQueryConfig;
        this.classKeys = classKeys;
        this.profile = profile;
        this.query = query;
        this.bagManager = bagManager;
        this.isBagSelected = false;
        // Extra (LOOKUP) constraints start out visible when applicable.
        if (isExtraConstraint()) {
            this.showExtraConstraint = true;
        }
    }
private String getEndClass(Path path) {
if (path.isRootPath()) {
return path.getStartClassDescriptor().getType().getSimpleName();
} else {
return path.getLastClassDescriptor().getType().getSimpleName();
}
}
private String getFieldName(Path path) {
if (!path.isRootPath()) {
return path.getLastElement();
}
return null;
}
    // TODO this should be in some common code
    // Returns the display string for an existing constraint, chosen by the
    // concrete constraint type: the typed value, bag name, lookup value,
    // subclass type, loop path, or the null-op keyword.  Returns null for
    // constraint types with no single string representation (e.g. multi-value).
    private String constraintStringValue(PathConstraint con) {
        if (con instanceof PathConstraintAttribute) {
            return ((PathConstraintAttribute) con).getValue();
        } else if (con instanceof PathConstraintBag) {
            return ((PathConstraintBag) con).getBag();
        } else if (con instanceof PathConstraintLookup) {
            return ((PathConstraintLookup) con).getValue();
        } else if (con instanceof PathConstraintSubclass) {
            return ((PathConstraintSubclass) con).getType();
        } else if (con instanceof PathConstraintLoop) {
            return ((PathConstraintLoop) con).getLoopPath();
        } else if (con instanceof PathConstraintNull) {
            return ((PathConstraintNull) con).getOp().toString();
        }
        return null;
    }
/**
 * If editing an existing constraint get the code for this constraint in the query, return null
 * if creating a new constraint.
 * @return the constraint code or null
 */
public String getCode() {
    return code;
}

/**
 * Return true if editing an existing template constraint and that constraint is editable.
 * @return true if an editable template constraint, false otherwise
 */
public boolean isEditableInTemplate() {
    return editableInTemplate;
}

/**
 * Get a representation of the path that is being constrained. DisplayPath provides convenience
 * methods for use in JSP.
 * @return the path being constrained
 */
public DisplayPath getPath() {
    return new DisplayPath(path);
}

/**
 * If editing an existing constraint, return the selected value. Otherwise return null. For
 * an attribute constraint this is the user-entered value; for a bag constraint, the selected
 * bag name, etc. If a bag has been selected via setBagSelected/setSelectedBagValue, the
 * selected bag value takes precedence over the underlying constraint's value.
 * @return the selected value or null
 */
public String getSelectedValue() {
    if (isBagSelected) {
        return selectedBagValue;
    }
    if (con != null) {
        return constraintStringValue(con);
    }
    return null;
}

/**
 * Return the value stored in the underlying constraint, ignoring any bag selection made
 * through setSelectedBagValue. Null when creating a new constraint.
 * @return the original constraint value or null
 */
public String getOriginalValue() {
    if (con != null) {
        return constraintStringValue(con);
    }
    return null;
}
/**
 * Returns the value collection if the constraint is a multivalue, otherwise return null.
 *
 * @return a Collection of Strings, or null if not a multivalue constraint
 */
public Collection<String> getMultiValues() {
    if (isMultiValueSelected()) {
        return ((PathConstraintMultiValue) con).getValues();
    }
    return null;
}

/**
 * If the constraint is a multivalue, returns the value collection represented as a
 * string separated by ', ', otherwise return an empty String.
 *
 * Fixed: the previous implementation called substring(0, lastIndexOf(",")) which threw
 * StringIndexOutOfBoundsException when the multivalue collection was empty (lastIndexOf
 * returns -1), and built the string with repeated concatenation.
 *
 * @return a String representing the multivalues of constraint
 */
public String getMultiValuesAsString() {
    Collection<String> values = getMultiValues();
    if (values == null || values.isEmpty()) {
        return "";
    }
    StringBuilder sb = new StringBuilder();
    for (String value : values) {
        if (sb.length() > 0) {
            sb.append(", ");
        }
        sb.append(value);
    }
    return sb.toString();
}
/**
 * Return true if a bag has been explicitly selected, or if the existing
 * constraint being edited is a bag constraint.
 * @return true if a bag has been selected
 */
public boolean isBagSelected() {
    // instanceof is false for null, so no separate null check is needed
    return isBagSelected || con instanceof PathConstraintBag;
}

/**
 * Set if the bag is selected, used by the method isBagSelected that returns true,
 * even if the constraint is an attribute constraint
 * @param isBagSelected true if a bag has been selected
 */
public void setBagSelected(boolean isBagSelected) {
    this.isBagSelected = isBagSelected;
}

/**
 * Return true if editing an existing constraint and 'has a value' or 'has no value' has been
 * selected.
 * @return true if a null constraint was selected
 */
public boolean isNullSelected() {
    return con instanceof PathConstraintNull;
}
/**
 * Return true if the attribute being edited has type boolean or Boolean.
 * @return true if the type is the primitive boolean or the object java.lang.Boolean
 */
public boolean isBoolean() {
    String type = getPath().getType();
    return ("boolean".equals(type) || "Boolean".equals(type));
}

/**
 * Return true if editing an existing constraint and an attribute value or LOOKUP constraint
 * was selected, i.e. anything that is not a bag, null or loop constraint.
 * @return true if an attribute/LOOKUP constraint was selected
 */
public boolean isValueSelected() {
    if (con != null) {
        return !(isBagSelected() || isNullSelected() || isLoopSelected());
    }
    return false;
}
/**
 * Return true if editing an existing constraint and a loop value has been
 * selected.
 * @return true if a loop constraint was selected
 */
public boolean isLoopSelected() {
    // instanceof already yields false for a null reference
    return con instanceof PathConstraintLoop;
}

/**
 * Return true if editing an existing constraint and a multivalue has been
 * selected.
 * @return true if a multivalue constraint was selected
 */
public boolean isMultiValueSelected() {
    return con instanceof PathConstraintMultiValue;
}
/**
 * Return the last class in the path and fieldname as the title for the constraint.
 * @return the title of this constraint
 */
public String getTitle() {
    return endCls + (fieldName == null ? "" : " " + fieldName);
}

/**
 * Return the simple name of the class at the end of the constrained path.
 * @return the end class name
 */
public String getEndClassName() {
    return endCls;
}

/**
 * Return the label associated with a constraint if editing a template query constraint.
 * @return the constraint label
 */
public String getDescription() {
    return constraintLabel;
}

/**
 * Return a help message to display alongside the constraint, this will examine the constraint
 * type and generate an appropriate message, e.g. list the key fields for LOOKUP constraints
 * and explain the use of wildcards. Returns null when there is no appropriate help.
 * @return the help message or null
 */
public String getHelpMessage() {
    return DisplayConstraintHelpMessages.getHelpMessage(this);
}
/**
 * If the bag is selected, return the op set with setSelectedBagOp.
 * If editing an existing constraint return the operation used.
 * Otherwise return null.
 * NOTE(review): when isBagSelected is true but setSelectedBagOp was never called,
 * selectedBagOp is null and this method throws NPE — confirm callers always set it.
 * @return the selected constraint op or null
 */
public DisplayConstraintOption getSelectedOp() {
    if (isBagSelected) {
        return new DisplayConstraintOption(selectedBagOp.toString(),
                selectedBagOp.getIndex());
    }
    if (con != null) {
        // con.getOp() may be null (e.g. for subclass constraints), hence the guard
        ConstraintOp selectedOp = con.getOp();
        if (selectedOp != null) {
            return new DisplayConstraintOption(selectedOp.toString(), selectedOp.getIndex());
        }
    }
    return null;
}

/**
 * Set the selectedBagOp
 * @param selectedBagOp the constraint op returned by the method getSelectedOp()
 * if the bag is selected
 */
public void setSelectedBagOp(ConstraintOp selectedBagOp) {
    this.selectedBagOp = selectedBagOp;
}

/**
 * Set the selectedBagValue returned by getSelectedValue if the bag is selected
 * @param selectedBagValue string to set the selectedBagValue
 */
public void setSelectedBagValue(String selectedBagValue) {
    this.selectedBagValue = selectedBagValue;
}
/**
 * If editing an existing LOOKUP constraint return the value selected for the extra constraint
 * field. Otherwise return null.
 * @return the LOOKUP constraint extra value or null
 */
public String getSelectedExtraValue() {
    if (!(con instanceof PathConstraintLookup)) {
        return null;
    }
    return ((PathConstraintLookup) con).getExtraValue();
}
/**
 * Given the path being constrained return the valid constraint operations. If constraining an
 * attribute the valid ops depend on the type being constrained - String, Integer, Boolean, etc.
 * The result is computed once and cached in {@code validOps}.
 *
 * Improvements: the expensive getPossibleValues() lookup is now called once instead of
 * twice, the redundant {@code (cond) ? true : false} expression is gone, and the loop-op
 * copy uses addAll.
 *
 * @return the valid constraint operations
 */
public List<DisplayConstraintOption> getValidOps() {
    if (validOps != null) {
        return validOps;
    }
    validOps = new ArrayList<DisplayConstraintOption>();
    if (con instanceof PathConstraintBag) {
        for (ConstraintOp op : PathConstraintBag.VALID_OPS) {
            validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
        }
    } else if (con instanceof PathConstraintSubclass) {
        // subclass constraints have no user-selectable operator
        return validOps;
    } else if (con instanceof PathConstraintLoop) {
        validOps.addAll(getLoopQueryOps());
    } else if (path.endIsAttribute()) {
        // TODO This was in the constraint jsp:
        // <c:if test="${!(editingNode.type == 'String' && (op.value == '<='
        //|| op.value == '>='))}">
        // TODO this should show different options if a dropdown is to be used
        // hoisted: getPossibleValues() may run a summary lookup, so call it once
        List<Object> possibleValues = getPossibleValues();
        boolean existPossibleValues = possibleValues != null && !possibleValues.isEmpty();
        for (ConstraintOp op : SimpleConstraint.validOps(path.getEndType())) {
            // MATCHES/DOES_NOT_MATCH are only offered when a dropdown of values exists
            if (existPossibleValues
                || (!op.getIndex().equals(ConstraintOp.MATCHES.getIndex())
                    && !op.getIndex().equals(ConstraintOp.DOES_NOT_MATCH.getIndex()))) {
                validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
            }
        }
        if (existPossibleValues) {
            for (ConstraintOp op : PathConstraintMultiValue.VALID_OPS) {
                validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
            }
        }
    } else if (isLookup()) {
        // this must be a LOOKUP constraint
        ConstraintOp lookup = ConstraintOp.LOOKUP;
        validOps.add(new DisplayConstraintOption(lookup.toString(), lookup.getIndex()));
    }
    return validOps;
}
/**
 * Returns the set of operators valid for loop constraints: equals and not-equals only.
 *
 * @return a List of DisplayConstraintOption objects
 */
public List<DisplayConstraintOption> getLoopQueryOps() {
    return Arrays.asList(new DisplayConstraintOption(ConstraintOp.EQUALS.toString(),
                ConstraintOp.EQUALS.getIndex()),
            new DisplayConstraintOption(ConstraintOp.NOT_EQUALS.toString(),
                ConstraintOp.NOT_EQUALS.getIndex()));
}
/**
 * Return true if this constraint should be a LOOKUP: the path ends in a class
 * (reference/collection) rather than an attribute, and that class has key fields configured.
 * @return true if this constraint should be a LOOKUP
 */
public boolean isLookup() {
    return !path.endIsAttribute() && ClassKeyHelper.hasKeyFields(classKeys, endCls);
}

/**
 * Return the LOOKUP constraint op wrapped as a display option.
 * @return the LOOKUP constraint op
 */
// TOOO do we need this? validOps should contain correct value
public DisplayConstraintOption getLookupOp() {
    return new DisplayConstraintOption(ConstraintOp.LOOKUP.toString(),
            ConstraintOp.LOOKUP.getIndex());
}

/**
 * Return the autocompleter for this path if one is available. Otherwise return null.
 * @return an autocompleter for this path or null
 */
public AutoCompleter getAutoCompleter() {
    if (ac == null || !ac.hasAutocompleter(endCls, fieldName)) {
        return null;
    }
    return ac;
}
/**
 * Values to populate a dropdown for the path if possible values are available.
 * Template summary values take precedence over ObjectStoreSummary values.
 * Returns null when no values are known for this path.
 * @return possible values to populate a dropdown, or null
 */
public List<Object> getPossibleValues() {
    String className = "";
    if (path.isRootPath()) {
        className = path.getStartClassDescriptor().getType().getCanonicalName();
    } else {
        className = path.getLastClassDescriptor().getType().getCanonicalName();
    }

    // if this is a template, it may have been summarised so we have a restricted set if values
    // for particular paths (the TemplateSummariser runs queries to work out exact values
    // constraints could take given the other constraints in the query.
    if (templateSummary != null && !templateSummary.isEmpty()) {
        return templateSummary;
    }

    // otherwise, we may have possible values from the ObjectStoreSummary
    List<Object> fieldValues = oss.getFieldValues(className, fieldName);
    // a single null entry means "no values known" for this field
    if (fieldValues.size() == 1 && fieldValues.get(0) == null) {
        return null;
    }

    if (path.endIsAttribute()) {
        Class<?> type = path.getEndType();
        if (Date.class.equals(type)) {
            List<Object> fieldValueFormatted = new ArrayList<Object>();
            // NOTE(review): fieldValues cannot be null here (size() was called above),
            // so this check is redundant; kept for byte-compatibility
            if (fieldValues != null) {
                for (Object obj : fieldValues) {
                    // assumes date values are stored as Strings — TODO confirm
                    fieldValueFormatted.add(ConstraintValueParser.format((String) obj));
                }
            }
            return fieldValueFormatted;
        }
    }
    return fieldValues;
}
/**
 * If a dropdown is available for a constraint fewer operations are possible, return the list
 * of operations. The result is computed once and cached; null when no dropdown applies.
 * @return the constraint ops available when selecting values from a dropdown
 */
// TODO Do we need this, could getValidOps return the correct ops if a dropdown is available
public List<DisplayConstraintOption> getFixedOps() {
    if (fixedOps != null) {
        return fixedOps;
    }

    if (getPossibleValues() != null) {
        fixedOps = new ArrayList<DisplayConstraintOption>();
        for (ConstraintOp op : SimpleConstraint.fixedEnumOps(path.getEndType())) {
            fixedOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
        }
    }
    return fixedOps;
}
/**
 * Return true if this is a LOOKUP constraint and an extra constraint should be available:
 * the end class has a reference field matching the bag query config's connect field.
 * @return true if an extra constraint option is available
 */
public boolean isExtraConstraint() {
    if (isLookup() && bagQueryConfig != null) {
        String extraValueFieldName = bagQueryConfig.getConnectField();
        ClassDescriptor cld = (path.isRootPath()) ? path.getStartClassDescriptor()
            : path.getLastClassDescriptor();
        ReferenceDescriptor fd = cld.getReferenceDescriptorByName(extraValueFieldName, true);
        return fd != null;
    } else {
        return false;
    }
}

/**
 * Return true if the extra constraint UI should be shown.
 * @return the showExtraConstraint flag
 */
public boolean isShowExtraConstraint() {
    return showExtraConstraint;
}

/**
 * Set whether the extra constraint UI should be shown.
 * @param showExtraConstraint the new flag value
 */
public void setShowExtraConstraint(boolean showExtraConstraint) {
    this.showExtraConstraint = showExtraConstraint;
}

/**
 * Return the fully qualified class name of the extra constraint field, or null if no
 * extra constraint applies to this path.
 * @return the extra value field class name or null
 */
public String getExtraValueFieldClass() {
    if (isExtraConstraint()) {
        return bagQueryConfig.getExtraConstraintClassName();
    }
    return null;
}

/**
 * Return the path of the connect field for the extra constraint, or null if no extra
 * constraint applies.
 * @return the connect field path or null
 */
public String getExtraConnectFieldPath() {
    if (isExtraConstraint()) {
        return path.toStringNoConstraints() + "." + bagQueryConfig.getConnectField();
    }
    return null;
}
/**
 * If a LOOKUP constraint and an extra constraint is available for this path, return a list of
 * the possible values for populating a dropdown. Otherwise return null.
 * @return a list of possible extra constraint values
 */
public List<Object> getExtraConstraintValues() {
    if (!isExtraConstraint()) {
        return null;
    }
    return oss.getFieldValues(bagQueryConfig.getExtraConstraintClassName(),
            bagQueryConfig.getConstrainField());
}

/**
 * If a LOOKUP constraint and an extra value constraint is available return the simple
 * (unqualified) class name of the extra constraint so it can be displayed.
 * Otherwise return null.
 * @return the extra constraint class name or null
 */
public String getExtraConstraintClassName() {
    if (!isExtraConstraint()) {
        return null;
    }
    // strip the package prefix; works even if there is no '.' (lastIndexOf returns -1)
    String qualified = bagQueryConfig.getExtraConstraintClassName();
    return qualified.substring(qualified.lastIndexOf('.') + 1);
}
/**
 * Return the key fields for this path as a formatted string, for use in LOOKUP help message.
 * @return a formatted string listing key fields for this path, or null if none configured
 */
public String getKeyFields() {
    if (ClassKeyHelper.hasKeyFields(classKeys, endCls)) {
        return StringUtil.prettyList(ClassKeyHelper.getKeyFieldNames(classKeys, endCls), true);
    }
    return null;
}

/**
 * Get a sorted list of public and user bag names available and current for this path.
 * If none available return null. Bags are not offered when constraining a key field
 * directly (the attribute value form is used instead).
 * @return a list of available bag names or null
 */
public List<String> getBags() {
    if (ClassKeyHelper.hasKeyFields(classKeys, endCls)
            && !ClassKeyHelper.isKeyField(classKeys, endCls, fieldName)) {
        Map<String, InterMineBag> bags =
            bagManager.getCurrentUserOrGlobalBagsOfType(profile, endCls);
        if (!bags.isEmpty()) {
            List<String> bagList = new ArrayList<String>(bags.keySet());
            Collections.sort(bagList);
            return bagList;
        }
    }
    return null;
}
/**
 * Return the valid constraint ops when constraining on a bag.
 * @return the possible bag constraint operations
 */
public List<DisplayConstraintOption> getBagOps() {
    List<DisplayConstraintOption> bagOps = new ArrayList<DisplayConstraintOption>();
    for (ConstraintOp op : BagConstraint.VALID_OPS) {
        bagOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
    }
    return bagOps;
}

/**
 * Returns the bag type that the constraint can be constrained to,
 * or null when no bags are available for this path.
 *
 * @return a String
 */
public String getBagType() {
    return (getBags() != null) ? endCls : null;
}
/**
 * Returns the kind of constraint currently selected, as a keyword used by the JSP:
 * "bag", "empty", "loopQuery" or "attribute".
 *
 * @return a String representing the constraint type selected
 */
public String getSelectedConstraint() {
    if (isBagSelected()) {
        return "bag";
    }
    if (isNullSelected()) {
        return "empty";
    }
    if (isLoopSelected()) {
        return "loopQuery";
    }
    return "attribute";
}
/**
 * Returns the set of paths that could feasibly be loop constrained onto the constraint's path,
 * given the query's outer join situation. A candidate path must be a class path, of the same
 * type, and in the same outer join group. When editing an existing loop constraint, its
 * current loop path is included first in the result.
 *
 * @return a Set of String paths that could be loop joined
 * @throws PathException if something goes wrong
 */
public Set<String> getCandidateLoops() throws PathException {
    if (path.endIsAttribute()) {
        // attributes can never be loop-joined
        return Collections.emptySet();
    } else {
        if (con instanceof PathConstraintLoop) {
            // keep the currently-selected loop path at the front of the set
            Set<String> retval = new LinkedHashSet<String>();
            retval.add(((PathConstraintLoop) con).getLoopPath());
            retval.addAll(query.getCandidateLoops(path.getNoConstraintsString()));
            return retval;
        } else {
            return query.getCandidateLoops(path.getNoConstraintsString());
        }
    }
}
/**
 * Return true if the constraint is locked: it shouldn't be enabled or disabled.
 * A null switchOffAbility is treated as locked.
 * @return true if the constraint is locked
 */
public boolean isLocked() {
    return switchOffAbility == null || switchOffAbility == SwitchOffAbility.LOCKED;
}

/**
 * Return true if the constraint is enabled, false if it is disabled or locked.
 * @return true if the constraint is enabled, false if it is disabled or locked
 */
public boolean isEnabled() {
    return switchOffAbility == SwitchOffAbility.ON;
}

/**
 * Return true if the constraint is disabled, false if it is enabled or locked.
 * @return true if the constraint is disabled, false if it is enabled or locked
 */
public boolean isDisabled() {
    return switchOffAbility == SwitchOffAbility.OFF;
}
/**
 * Return the value on, off, locked depending on the constraint SwitchOffAbility.
 * A null switchOffAbility falls through to "locked", consistent with isLocked().
 * @return switchable property (on, off, locked)
 */
public String getSwitchable() {
    if (SwitchOffAbility.ON.equals(switchOffAbility)) {
        return SwitchOffAbility.ON.toString().toLowerCase();
    } else if (SwitchOffAbility.OFF.equals(switchOffAbility)) {
        return SwitchOffAbility.OFF.toString().toLowerCase();
    } else {
        return SwitchOffAbility.LOCKED.toString().toLowerCase();
    }
}

/**
 * Set the switchOffAbility
 * @param switchOffAbility value
 */
public void setSwitchOffAbility(SwitchOffAbility switchOffAbility) {
    this.switchOffAbility = switchOffAbility;
}
/**
 * Return true if the input field can be displayed, method for use in JSP.
 *
 * Fixed: the previous version called getSelectedOp().getProperty() without a null check,
 * throwing NPE when the underlying constraint has no operator (getSelectedOp() can return
 * null — the sibling isPossibleValuesDisplayed() already guards against exactly this).
 * The duplicated possible-values check is factored into a helper.
 *
 * @return true if the input is displayed
 */
public boolean isInputFieldDisplayed() {
    if (con != null) {
        DisplayConstraintOption selectedOp = getSelectedOp();
        if (selectedOp == null) {
            // no operator selected: same decision as for a brand-new constraint
            return !hasPossibleValues();
        }
        int selectedOperator = selectedOp.getProperty();
        // free-text operators always need the input field
        if (selectedOperator == ConstraintOp.MATCHES.getIndex()
            || selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
            || selectedOperator == ConstraintOp.LOOKUP.getIndex()
            || selectedOperator == ConstraintOp.CONTAINS.getIndex()
            || selectedOperator == ConstraintOp.DOES_NOT_CONTAIN.getIndex()) {
            return true;
        }
        // ONE_OF/NONE_OF only use the input field for bag constraints
        if (selectedOperator == ConstraintOp.ONE_OF.getIndex()
            || selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
            return con instanceof PathConstraintBag;
        }
        return !hasPossibleValues();
    }
    return !hasPossibleValues();
}

/** True when a non-empty dropdown of possible values exists for this path. */
private boolean hasPossibleValues() {
    List<Object> possibleValues = getPossibleValues();
    return possibleValues != null && !possibleValues.isEmpty();
}
/**
 * Return true if the drop-down containing the possibleValues can be displayed,
 * method for use in JSP. The dropdown is suppressed for free-text and multi-value
 * operators, and shown only when possible values actually exist.
 * @return true if the drop-down is displayed
 */
public boolean isPossibleValuesDisplayed() {
    if (con != null) {
        // getSelectedOp() can be null when the constraint has no operator
        if (getSelectedOp() == null) {
            return false;
        }
        int selectedOperator = getSelectedOp().getProperty();
        if (selectedOperator == ConstraintOp.MATCHES.getIndex()
            || selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
            || selectedOperator == ConstraintOp.CONTAINS.getIndex()
            || selectedOperator == ConstraintOp.DOES_NOT_CONTAIN.getIndex()
            || selectedOperator == ConstraintOp.LOOKUP.getIndex()
            || selectedOperator == ConstraintOp.ONE_OF.getIndex()
            || selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
            return false;
        }
        if (getPossibleValues() != null && getPossibleValues().size() > 0) {
            return true;
        }
        return false;
    }
    if (getPossibleValues() != null && getPossibleValues().size() > 0) {
        return true;
    }
    return false;
}
/**
 * Return true if the multi-select containing the possibleValue can be displayed,
 * method for use in JSP. Only ONE_OF / NONE_OF operators use the multi-select.
 *
 * Fixed: guards against getSelectedOp() returning null (constraint with no operator),
 * which previously caused an NPE; the sibling isPossibleValuesDisplayed() already has
 * this guard.
 *
 * @return true if the multi-select is displayed
 */
public boolean isMultiValuesDisplayed() {
    if (con == null) {
        return false;
    }
    DisplayConstraintOption selectedOp = getSelectedOp();
    if (selectedOp == null) {
        return false;
    }
    int selectedOperator = selectedOp.getProperty();
    return selectedOperator == ConstraintOp.ONE_OF.getIndex()
        || selectedOperator == ConstraintOp.NONE_OF.getIndex();
}
/**
 * Representation of a constraint operation to populate a dropdown. Label is the value to be
 * displayed in the dropdown, property is the index of the constraint op that will be selected.
 * @author Richard Smith
 *
 */
public class DisplayConstraintOption
{
    // text shown to the user in the dropdown
    private String label;
    // constraint op index submitted with the form
    private Integer property;

    /**
     * Construct with the constraint label and index.
     * @param label the value to be shown in dropdown
     * @param property the constraint index to be added to form on selection
     */
    public DisplayConstraintOption(String label, Integer property) {
        this.label = label;
        this.property = property;
    }

    /**
     * Get the value to be displayed in the dropdown for this operation.
     * @return the display value
     */
    public String getLabel() {
        return label;
    }

    /**
     * Get the constraint index to be put in form when this op is selected.
     * @return the constraint index
     */
    public Integer getProperty() {
        return property;
    }
}
}
package org.intermine.web.logic.query;
/*
* Copyright (C) 2002-2011 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.intermine.api.bag.BagManager;
import org.intermine.api.bag.BagQueryConfig;
import org.intermine.api.config.ClassKeyHelper;
import org.intermine.api.profile.InterMineBag;
import org.intermine.api.profile.Profile;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.ReferenceDescriptor;
import org.intermine.objectstore.ObjectStoreSummary;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.pathquery.ConstraintValueParser;
import org.intermine.pathquery.Path;
import org.intermine.pathquery.PathConstraint;
import org.intermine.pathquery.PathConstraintAttribute;
import org.intermine.pathquery.PathConstraintBag;
import org.intermine.pathquery.PathConstraintLookup;
import org.intermine.pathquery.PathConstraintLoop;
import org.intermine.pathquery.PathConstraintMultiValue;
import org.intermine.pathquery.PathConstraintNull;
import org.intermine.pathquery.PathConstraintSubclass;
import org.intermine.pathquery.PathException;
import org.intermine.pathquery.PathQuery;
import org.intermine.template.SwitchOffAbility;
import org.intermine.util.StringUtil;
import org.intermine.web.autocompletion.AutoCompleter;
import org.intermine.web.logic.querybuilder.DisplayPath;
/**
* Representation of a PathQuery constraint for use by JSP pages. This object provides methods
* needed to populate constraint editing boxes and dropdowns, find available bag names, etc. Can
* either represent a new constraint to be added with no values set or an existing constraint that
* is being edited.
*
* Get methods return null if no values are available
*
* @author Richard Smith
*/
public class DisplayConstraint
{
// The path being constrained.
private Path path;
// Lazily-built cache of valid constraint operations for this path.
private List<DisplayConstraintOption> validOps;
// Autocompleter service; may be null or lack data for this path.
private AutoCompleter ac;
// Summary of ObjectStore contents, used for possible-values dropdowns.
private ObjectStoreSummary oss;
// Simple name of the class at the end of the path.
private String endCls;
// Final attribute name of the path, null for a root path.
private String fieldName;
// Extra details needed for LOOKUP constraints.
private BagQueryConfig bagQueryConfig;
// Identifier field config, needed for LOOKUP constraints.
private Map<String, List<FieldDescriptor>> classKeys;
// Provides access to saved bags.
private BagManager bagManager;
// User editing the query, used to fetch available bags.
private Profile profile;
// Label associated with a template constraint, if any.
private String constraintLabel;
// Lazily-built cache of ops valid when a dropdown is used.
private List<DisplayConstraintOption> fixedOps;
// The existing constraint being edited, null for a new constraint.
private PathConstraint con;
// The query this constraint belongs to.
private PathQuery query;
// Code of this constraint in the query, null for a new constraint.
private String code;
// True if a template constraint that the user may edit.
private boolean editableInTemplate;
// Whether the constraint is on, off or locked.
private SwitchOffAbility switchOffAbility;
// True when the user has selected a bag instead of an attribute value.
private boolean isBagSelected;
// Bag name selected by the user (overrides the constraint's own value).
private String selectedBagValue;
// Operator selected for the bag constraint.
private ConstraintOp selectedBagOp;
// Restricted value set produced by the TemplateSummariser, if any.
private List<Object> templateSummary;
// Whether the LOOKUP extra-constraint UI should be shown.
private boolean showExtraConstraint = false;
/**
* Construct for a new constraint that is being added to a query.
* @param path The path that is being constrained
* @param profile user editing the query, used to fetch available bags
* @param query the PathQuery, in order to provide information on candidate loops
* @param ac auto completer
* @param oss summary data for the ObjectStore contents
* @param bagQueryConfig addition details for needed for LOOKUP constraints
* @param classKeys identifier field config, needed for LOOKUP constraints
* @param bagManager provides access to saved bags
*/
protected DisplayConstraint(Path path, Profile profile, PathQuery query, AutoCompleter ac,
        ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
        Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
    // A brand-new constraint: no existing PathConstraint, code or template metadata.
    init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
}
/**
* Construct for an existing constraint that is being edited.
* @param path The path that is being constrained
* @param con the constraint being edited
* @param label text associated with this constraint, if a template query
* @param code the code of this constraint in the query
* @param editableInTemplate true if this is a template query and this constraint is editable
* @param switchOffAbility if the contraint is on, off, locked
* @param profile user editing the query, used to fetch available bags
* @param query the PathQuery, in order to provide information on candidate loops
* @param ac auto completer
* @param oss summary data for the ObjectStore contents
* @param bagQueryConfig addition details for needed for LOOKUP constraints
* @param classKeys identifier field config, needed for LOOKUP constraints
* @param bagManager provides access to saved bags
*/
protected DisplayConstraint(Path path, PathConstraint con, String label, String code,
        boolean editableInTemplate, SwitchOffAbility switchOffAbility, Profile profile,
        PathQuery query, AutoCompleter ac,
        ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
        Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager,
        List<Object> templateSummary) {
    // Shared initialisation with the "new constraint" constructor.
    init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
    // State that only exists when editing an existing constraint.
    this.con = con;
    this.constraintLabel = label;
    this.code = code;
    this.editableInTemplate = editableInTemplate;
    this.switchOffAbility = switchOffAbility;
    this.templateSummary = templateSummary;
}
/**
 * Initialise the state shared by both constructors.
 * NOTE(review): isExtraConstraint() runs before 'con' is assigned by the editing
 * constructor, so the initial showExtraConstraint flag depends only on the path
 * and bag query config — confirm this is intended.
 */
private void init(Path path, Profile profile, PathQuery query, AutoCompleter ac,
        ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
        Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
    this.path = path;
    this.ac = ac;
    this.oss = oss;
    // Cache the end class and field name, used by most accessors.
    this.endCls = getEndClass(path);
    this.fieldName = getFieldName(path);
    this.bagQueryConfig = bagQueryConfig;
    this.classKeys = classKeys;
    this.profile = profile;
    this.query = query;
    this.bagManager = bagManager;
    this.isBagSelected = false;
    if (isExtraConstraint()) {
        this.showExtraConstraint = true;
    }
}
/**
 * Return the unqualified name of the class at the end of the given path: the
 * root class for a root path, otherwise the last class in the path.
 */
private String getEndClass(Path path) {
    ClassDescriptor cld = path.isRootPath()
            ? path.getStartClassDescriptor()
            : path.getLastClassDescriptor();
    return cld.getType().getSimpleName();
}

/**
 * Return the final field name of the path, or null for a root path (which
 * has no field element).
 */
private String getFieldName(Path path) {
    return path.isRootPath() ? null : path.getLastElement();
}
// TODO this should be in some common code
/**
 * Return the String value held by a constraint, whatever its concrete subtype:
 * the attribute value, bag name, LOOKUP value, subclass type, loop path, or the
 * name of the null-op. Returns null for any other constraint type.
 * @param con the constraint to read
 * @return the constraint's value as a String, or null
 */
private String constraintStringValue(PathConstraint con) {
    if (con instanceof PathConstraintAttribute) {
        return ((PathConstraintAttribute) con).getValue();
    } else if (con instanceof PathConstraintBag) {
        return ((PathConstraintBag) con).getBag();
    } else if (con instanceof PathConstraintLookup) {
        return ((PathConstraintLookup) con).getValue();
    } else if (con instanceof PathConstraintSubclass) {
        return ((PathConstraintSubclass) con).getType();
    } else if (con instanceof PathConstraintLoop) {
        return ((PathConstraintLoop) con).getLoopPath();
    } else if (con instanceof PathConstraintNull) {
        return ((PathConstraintNull) con).getOp().toString();
    }
    return null;
}
/**
 * If editing an existing constraint get the code for this constraint in the query, return null
 * if creating a new constraint.
 * @return the constraint code or null
 */
public String getCode() {
    return code;
}

/**
 * Return true if editing an existing template constraint and that constraint is editable.
 * @return true if an editable template constraint, false otherwise
 */
public boolean isEditableInTemplate() {
    return editableInTemplate;
}

/**
 * Get a representation of the path that is being constrained. DisplayPath provides convenience
 * methods for use in JSP.
 * @return the path being constrained
 */
public DisplayPath getPath() {
    return new DisplayPath(path);
}

/**
 * If editing an existing constraint, return the selected value. Otherwise return null. For
 * an attribute constraint this is the user-entered value; for a bag constraint, the selected
 * bag name, etc. If a bag has been selected via setBagSelected/setSelectedBagValue, the
 * selected bag value takes precedence over the underlying constraint's value.
 * @return the selected value or null
 */
public String getSelectedValue() {
    if (isBagSelected) {
        return selectedBagValue;
    }
    if (con != null) {
        return constraintStringValue(con);
    }
    return null;
}

/**
 * Return the value stored in the underlying constraint, ignoring any bag selection made
 * through setSelectedBagValue. Null when creating a new constraint.
 * @return the original constraint value or null
 */
public String getOriginalValue() {
    if (con != null) {
        return constraintStringValue(con);
    }
    return null;
}
/**
 * Returns the value collection if the constraint is a multivalue, otherwise return null.
 *
 * @return a Collection of Strings, or null if not a multivalue constraint
 */
public Collection<String> getMultiValues() {
    if (isMultiValueSelected()) {
        return ((PathConstraintMultiValue) con).getValues();
    }
    return null;
}

/**
 * If the constraint is a multivalue, returns the value collection represented as a
 * string separated by ', ', otherwise return an empty String.
 *
 * Fixed: the previous implementation called substring(0, lastIndexOf(",")) which threw
 * StringIndexOutOfBoundsException when the multivalue collection was empty (lastIndexOf
 * returns -1), and built the string with repeated concatenation.
 *
 * @return a String representing the multivalues of constraint
 */
public String getMultiValuesAsString() {
    Collection<String> values = getMultiValues();
    if (values == null || values.isEmpty()) {
        return "";
    }
    StringBuilder sb = new StringBuilder();
    for (String value : values) {
        if (sb.length() > 0) {
            sb.append(", ");
        }
        sb.append(value);
    }
    return sb.toString();
}
/**
 * Return true if a bag has been explicitly selected, or if the existing
 * constraint being edited is a bag constraint.
 * @return true if a bag has been selected
 */
public boolean isBagSelected() {
    // instanceof is false for null, so no separate null check is needed
    return isBagSelected || con instanceof PathConstraintBag;
}

/**
 * Set if the bag is selected, used by the method isBagSelected that returns true,
 * even if the constraint is an attribute constraint
 * @param isBagSelected true if a bag has been selected
 */
public void setBagSelected(boolean isBagSelected) {
    this.isBagSelected = isBagSelected;
}

/**
 * Return true if editing an existing constraint and 'has a value' or 'has no value' has been
 * selected.
 * @return true if a null constraint was selected
 */
public boolean isNullSelected() {
    return con instanceof PathConstraintNull;
}
/**
 * Return true if the attribute being edited has type boolean or Boolean.
 * @return true if the type is the primitive boolean or the object java.lang.Boolean
 */
public boolean isBoolean() {
    final String type = getPath().getType();
    return "boolean".equals(type) || "Boolean".equals(type);
}

/**
 * Return true if editing an existing constraint and an attribute value or LOOKUP constraint
 * was selected, i.e. anything that is not a bag, null or loop constraint.
 * @return true if an attribute/LOOKUP constraint was selected
 */
public boolean isValueSelected() {
    if (con == null) {
        return false;
    }
    return !(isBagSelected() || isNullSelected() || isLoopSelected());
}

/**
 * Return true if editing an existing constraint and a loop value has been
 * selected.
 * @return true if a loop constraint was selected
 */
public boolean isLoopSelected() {
    // instanceof already yields false for a null reference
    return con instanceof PathConstraintLoop;
}

/**
 * Return true if editing an existing constraint and a multivalue has been
 * selected.
 * @return true if a multivalue constraint was selected
 */
public boolean isMultiValueSelected() {
    return con instanceof PathConstraintMultiValue;
}
    /**
     * Return the last class in the path and fieldname as the title for the constraint.
     * @return the title of this constraint
     */
    public String getTitle() {
        return endCls + (fieldName == null ? "" : " " + fieldName);
    }
    /**
     * Return the unqualified name of the last class in the constrained path.
     * @return the end class name
     */
    public String getEndClassName() {
        return endCls;
    }
    /**
     * Return the label associated with a constraint if editing a template query constraint.
     * @return the constraint label
     */
    public String getDescription() {
        return constraintLabel;
    }
    /**
     * Return a help message to display alongside the constraint, this will examine the constraint
     * type and generate an appropriate message, e.g. list the key fields for LOOKUP constraints
     * and explain the use of wildcards. Returns null when there is no appropriate help.
     * @return the help message or null
     */
    public String getHelpMessage() {
        return DisplayConstraintHelpMessages.getHelpMessage(this);
    }
    /**
     * If the bag is selected, return the value set with {@link #setSelectedBagOp}.
     * If editing an existing constraint return the operation used.
     * Otherwise return null.
     * @return the selected constraint op or null
     */
    public DisplayConstraintOption getSelectedOp() {
        // Bag selection takes precedence over the operator of the existing constraint.
        if (isBagSelected) {
            return new DisplayConstraintOption(selectedBagOp.toString(),
                    selectedBagOp.getIndex());
        }
        if (con != null) {
            ConstraintOp selectedOp = con.getOp();
            // con.getOp() may be null (e.g. subclass constraints), hence the guard.
            if (selectedOp != null) {
                return new DisplayConstraintOption(selectedOp.toString(), selectedOp.getIndex());
            }
        }
        return null;
    }
    /**
     * Set the selectedBagOp.
     * @param selectedBagOp the constraint op returned by the method getSelectedOp()
     * if the bag is selected
     */
    public void setSelectedBagOp(ConstraintOp selectedBagOp) {
        this.selectedBagOp = selectedBagOp;
    }
    /**
     * Set the selectedBagValue returned by getSelectedValue if the bag is selected.
     * @param selectedBagValue string to set the selectedBagValue
     */
    public void setSelectedBagValue(String selectedBagValue) {
        this.selectedBagValue = selectedBagValue;
    }
    /**
     * If editing an existing LOOKUP constraint return the value selected for the extra constraint
     * field. Otherwise return null.
     * @return the LOOKUP constraint extra value or null
     */
    public String getSelectedExtraValue() {
        if (con instanceof PathConstraintLookup) {
            return ((PathConstraintLookup) con).getExtraValue();
        }
        return null;
    }
/**
* Given the path being constrained return the valid constraint operations. If constraining an
* attribute the valid ops depend on the type being constraint - String, Integer, Boolean, etc.
* @return the valid constraint operations
*/
public List<DisplayConstraintOption> getValidOps() {
if (validOps != null) {
return validOps;
}
validOps = new ArrayList<DisplayConstraintOption>();
if (con instanceof PathConstraintBag) {
for (ConstraintOp op : PathConstraintBag.VALID_OPS) {
validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
}
} else if (con instanceof PathConstraintSubclass) {
return validOps;
} else if (con instanceof PathConstraintLoop) {
List<DisplayConstraintOption> loopQueryOps = getLoopQueryOps();
for (DisplayConstraintOption dco : loopQueryOps) {
validOps.add(dco);
}
} else if (path.endIsAttribute()) {
List<ConstraintOp> allOps = SimpleConstraint.validOps(path.getEndType());
// TODO This was in the constraint jsp:
// <c:if test="${!(editingNode.type == 'String' && (op.value == '<='
//|| op.value == '>='))}">
// TODO this should show different options if a dropdown is to be used
boolean existPossibleValues =
(getPossibleValues() != null && getPossibleValues().size() > 0) ? true : false;
for (ConstraintOp op : allOps) {
if (existPossibleValues
|| (!op.getIndex().equals(ConstraintOp.MATCHES.getIndex())
&& !op.getIndex().equals(ConstraintOp.DOES_NOT_MATCH.getIndex()))
) {
validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
}
}
if (existPossibleValues) {
for (ConstraintOp op : PathConstraintMultiValue.VALID_OPS) {
validOps.add(new DisplayConstraintOption(op.toString(),
op.getIndex()));
}
}
} else if (isLookup()) {
// this must be a LOOKUP constraint
ConstraintOp lookup = ConstraintOp.LOOKUP;
validOps.add(new DisplayConstraintOption(lookup.toString(), lookup.getIndex()));
}
return validOps;
}
/**
* Returns the set of operators valid for loop constraints.
*
* @return a List of DisplayConstraintOption objects
*/
public List<DisplayConstraintOption> getLoopQueryOps() {
return Arrays.asList(new DisplayConstraintOption(ConstraintOp.EQUALS.toString(),
ConstraintOp.EQUALS.getIndex()),
new DisplayConstraintOption(ConstraintOp.NOT_EQUALS.toString(),
ConstraintOp.NOT_EQUALS.getIndex()));
}
    /**
     * Return true if this constraint should be a LOOKUP, true if constraining a class (ref/col)
     * instead of an attribute and that class has class keys defined.
     * @return true if this constraint should be a LOOKUP
     */
    public boolean isLookup() {
        return !path.endIsAttribute() && ClassKeyHelper.hasKeyFields(classKeys, endCls);
    }
    /**
     * Return the LOOKUP constraint op wrapped as a display option.
     * @return the LOOKUP constraint op
     */
    // TODO do we need this? validOps should contain correct value
    public DisplayConstraintOption getLookupOp() {
        ConstraintOp lookup = ConstraintOp.LOOKUP;
        return new DisplayConstraintOption(lookup.toString(), lookup.getIndex());
    }
    /**
     * Return the autocompleter for this path if one is available. Otherwise return null.
     * Availability is decided per end-class/field pair.
     * @return an autocompleter for this path or null
     */
    public AutoCompleter getAutoCompleter() {
        if (ac != null && ac.hasAutocompleter(endCls, fieldName)) {
            return ac;
        }
        return null;
    }
/**
* Values to populate a dropdown for the path if possible values are available.
* @return possible values to populate a dropdown
*/
public List<Object> getPossibleValues() {
String className = "";
if (path.isRootPath()) {
className = path.getStartClassDescriptor().getType().getCanonicalName();
} else {
className = path.getLastClassDescriptor().getType().getCanonicalName();
}
// if this is a template, it may have been summarised so we have a restricted set if values
// for particular paths (the TemplateSummariser runs queries to work out exact values
// constraints could take given the other constraints in the query.
if (templateSummary != null && !templateSummary.isEmpty()) {
return templateSummary;
}
// otherwise, we may have possible values from the ObjectStoreSummary
List<Object> fieldValues = oss.getFieldValues(className, fieldName);
if (fieldValues.size() == 1 && fieldValues.get(0) == null) {
return null;
}
if (path.endIsAttribute()) {
Class<?> type = path.getEndType();
if (Date.class.equals(type)) {
List<Object> fieldValueFormatted = new ArrayList<Object>();
if (fieldValues != null) {
for (Object obj : fieldValues) {
fieldValueFormatted.add(ConstraintValueParser.format((String) obj));
}
}
return fieldValueFormatted;
}
}
return fieldValues;
}
    /**
     * If a dropdown is available for a constraint fewer operations are possible, return the list
     * of operations. The result is computed lazily and cached in fixedOps; it stays null when no
     * possible values are available.
     * @return the constraint ops available when selecting values from a dropdown, or null
     */
    // TODO Do we need this, could getValidOps return the correct ops if a dropdown is available
    public List<DisplayConstraintOption> getFixedOps() {
        if (fixedOps != null) {
            return fixedOps;
        }
        if (getPossibleValues() != null) {
            fixedOps = new ArrayList<DisplayConstraintOption>();
            for (ConstraintOp op : SimpleConstraint.fixedEnumOps(path.getEndType())) {
                fixedOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
            }
        }
        return fixedOps;
    }
    /**
     * Return true if this is a LOOKUP constraint and an extra constraint should be available,
     * i.e. the bag query config declares a connect field that resolves to a reference on the
     * constrained class.
     * @return true if an extra constraint option is available
     */
    public boolean isExtraConstraint() {
        if (isLookup() && bagQueryConfig != null) {
            String extraValueFieldName = bagQueryConfig.getConnectField();
            ClassDescriptor cld = (path.isRootPath()) ? path.getStartClassDescriptor()
                : path.getLastClassDescriptor();
            ReferenceDescriptor fd = cld.getReferenceDescriptorByName(extraValueFieldName, true);
            return fd != null;
        } else {
            return false;
        }
    }
    /**
     * Return whether the extra constraint widget should currently be shown.
     * @return true if the extra constraint is shown
     */
    public boolean isShowExtraConstraint() {
        return showExtraConstraint;
    }
    /**
     * Set whether the extra constraint widget should be shown.
     * @param showExtraConstraint true to show the extra constraint
     */
    public void setShowExtraConstraint(boolean showExtraConstraint) {
        this.showExtraConstraint = showExtraConstraint;
    }
    /**
     * Return the fully qualified class name used by the extra value field, or null when no
     * extra constraint is available.
     * @return the extra value field class name or null
     */
    public String getExtraValueFieldClass() {
        if (isExtraConstraint()) {
            return bagQueryConfig.getExtraConstraintClassName();
        }
        return null;
    }
    /**
     * Return the path of the connect field for the extra constraint, or null when no extra
     * constraint is available.
     * @return the connect field path or null
     */
    public String getExtraConnectFieldPath() {
        if (isExtraConstraint()) {
            return path.toStringNoConstraints() + "." + bagQueryConfig.getConnectField();
        }
        return null;
    }
    /**
     * If a LOOKUP constraint and an extra constraint is available for this path, return a list of
     * the possible values for populating a dropdown. Otherwise return null.
     * @return a list of possible extra constraint values
     */
    public List<Object> getExtraConstraintValues() {
        if (isExtraConstraint()) {
            String extraValueFieldName = bagQueryConfig.getConstrainField();
            return oss.getFieldValues(bagQueryConfig.getExtraConstraintClassName(),
                    extraValueFieldName);
        }
        return null;
    }
    /**
     * If a LOOKUP constraint and an extra value constraint is available return the classname of
     * the extra constraint so it can be displayed. Otherwise return null.
     * @return the unqualified extra constraint class name or null
     */
    public String getExtraConstraintClassName() {
        if (isExtraConstraint()) {
            // Strip the package prefix - only the simple class name is displayed.
            String[] splitClassName = bagQueryConfig.getExtraConstraintClassName().split("[.]");
            return splitClassName[splitClassName.length - 1];
            //return bagQueryConfig.getExtraConstraintClassName();
        }
        return null;
    }
    /**
     * Return the key fields for this path as a formatted string, for use in LOOKUP help message.
     * @return a formatted string listing key fields for this path, or null if there are none
     */
    public String getKeyFields() {
        if (ClassKeyHelper.hasKeyFields(classKeys, endCls)) {
            return StringUtil.prettyList(ClassKeyHelper.getKeyFieldNames(classKeys, endCls), true);
        }
        return null;
    }
    /**
     * Get a sorted list of public and current user bag names available for this path.
     * If none are available, or the constrained field is itself a class key (a bag upload
     * would be redundant), return null.
     * @return a sorted list of available bag names or null
     */
    public List<String> getBags() {
        if (ClassKeyHelper.hasKeyFields(classKeys, endCls)
                && !ClassKeyHelper.isKeyField(classKeys, endCls, fieldName)) {
            Map<String, InterMineBag> bags =
                bagManager.getCurrentUserOrGlobalBagsOfType(profile, endCls);
            if (!bags.isEmpty()) {
                List<String> bagList = new ArrayList<String>(bags.keySet());
                Collections.sort(bagList);
                return bagList;
            }
        }
        return null;
    }
/**
* Return the valid constraint ops when constraining on a bag.
* @return the possible bag constraint operations
*/
public List<DisplayConstraintOption> getBagOps() {
List<DisplayConstraintOption> bagOps = new ArrayList<DisplayConstraintOption>();
for (ConstraintOp op : BagConstraint.VALID_OPS) {
bagOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
}
return bagOps;
}
/**
* Returns the bag type that the constraint can be constrained to.
* If there aren't bags return null
*
* @return a String
*/
public String getBagType() {
if (getBags() != null) {
return endCls;
} else {
return null;
}
}
/**
* Returns the constraint type selected.
*
* @return a String representing the constraint type selected
*/
public String getSelectedConstraint() {
if (isBagSelected()) {
return "bag";
} else if (isNullSelected()) {
return "empty";
} else if (isLoopSelected()) {
return "loopQuery";
}
return "attribute";
}
    /**
     * Returns the set of paths that could feasibly be loop constrained onto the constraint's path,
     * given the query's outer join situation. A candidate path must be a class path, of the same
     * type, and in the same outer join group.
     *
     * @return a Set of String paths that could be loop joined
     * @throws PathException if something goes wrong
     */
    public Set<String> getCandidateLoops() throws PathException {
        // Attribute paths can never be loop constrained.
        if (path.endIsAttribute()) {
            return Collections.emptySet();
        } else {
            if (con instanceof PathConstraintLoop) {
                // When editing an existing loop constraint, keep its current loop path
                // first in the (ordered) result so it stays selected in the UI.
                Set<String> retval = new LinkedHashSet<String>();
                retval.add(((PathConstraintLoop) con).getLoopPath());
                retval.addAll(query.getCandidateLoops(path.getNoConstraintsString()));
                return retval;
            } else {
                return query.getCandidateLoops(path.getNoConstraintsString());
            }
        }
    }
/**
* Return true if the constraint is locked, it should'n be enabled or disabled.
* @return true if the constraint is locked
*/
public boolean isLocked() {
if (switchOffAbility == null || switchOffAbility == SwitchOffAbility.LOCKED) {
return true;
}
return false;
}
/**
* Return true if the constraint is enabled, false if it is disabled or locked.
* @return true if the constraint is enabled,false if it is disabled or locked
*/
public boolean isEnabled() {
if (switchOffAbility == SwitchOffAbility.ON) {
return true;
}
return false;
}
/**
* Return true if the constraint is disabled, false if it is enabled or locked.
* @return true if the constraint is disabled,false if it is enabled or locked
*/
public boolean isDisabled() {
if (switchOffAbility == SwitchOffAbility.OFF) {
return true;
}
return false;
}
    /**
     * Return the value on, off, locked depending on the constraint SwitchOffAbility.
     * Anything that is neither ON nor OFF (including null) is reported as locked.
     * @return switchable property (on, off, locked)
     */
    public String getSwitchable() {
        if (SwitchOffAbility.ON.equals(switchOffAbility)) {
            return SwitchOffAbility.ON.toString().toLowerCase();
        } else if (SwitchOffAbility.OFF.equals(switchOffAbility)) {
            return SwitchOffAbility.OFF.toString().toLowerCase();
        } else {
            return SwitchOffAbility.LOCKED.toString().toLowerCase();
        }
    }
    /**
     * Set the switchOffAbility.
     * @param switchOffAbility value
     */
    public void setSwitchOffAbility(SwitchOffAbility switchOffAbility) {
        this.switchOffAbility = switchOffAbility;
    }
/**
* Return true if the input field can be displayed, method for use in JSP
* @return true if the input is displayed
*/
public boolean isInputFieldDisplayed() {
if (con != null) {
int selectedOperator = getSelectedOp().getProperty();
if (selectedOperator == ConstraintOp.MATCHES.getIndex()
|| selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
|| selectedOperator == ConstraintOp.LOOKUP.getIndex()
|| selectedOperator == ConstraintOp.CONTAINS.getIndex()
|| selectedOperator == ConstraintOp.DOES_NOT_CONTAIN.getIndex()) {
return true;
}
if (selectedOperator == ConstraintOp.ONE_OF.getIndex()
|| selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
if (con instanceof PathConstraintBag) {
return true;
}
return false;
}
if (getPossibleValues() != null && getPossibleValues().size() > 0) {
return false;
}
return true;
}
if (getPossibleValues() != null && getPossibleValues().size() > 0) {
return false;
}
return true;
}
    /**
     * Return true if the drop-down containing the possibleValues can be displayed,
     * method for use in JSP
     * @return true if the drop-down is displayed
     */
    public boolean isPossibleValuesDisplayed() {
        if (con != null) {
            // Constraints without an operator never get a dropdown.
            if (getSelectedOp() == null) {
                return false;
            }
            int selectedOperator = getSelectedOp().getProperty();
            // Pattern, LOOKUP and multi-value operators use other widgets.
            if (selectedOperator == ConstraintOp.MATCHES.getIndex()
                    || selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
                    || selectedOperator == ConstraintOp.CONTAINS.getIndex()
                    || selectedOperator == ConstraintOp.DOES_NOT_CONTAIN.getIndex()
                    || selectedOperator == ConstraintOp.LOOKUP.getIndex()
                    || selectedOperator == ConstraintOp.ONE_OF.getIndex()
                    || selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
                return false;
            }
            if (getPossibleValues() != null && getPossibleValues().size() > 0) {
                return true;
            }
            return false;
        }
        // New constraint: a dropdown is shown whenever possible values exist.
        if (getPossibleValues() != null && getPossibleValues().size() > 0) {
            return true;
        }
        return false;
    }
/**
* Return true if the multi-select containing the possibleValue can be displayed,
* method for use in JSP
* @return true if the multi-select is displayed
*/
public boolean isMultiValuesDisplayed() {
if (con != null) {
int selectedOperator = getSelectedOp().getProperty();
if (selectedOperator == ConstraintOp.ONE_OF.getIndex()
|| selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
return true;
}
return false;
} return false;
}
/**
* Representation of a constraint operation to populate a dropdown. Label is value to be
* displayed in the dropdown, property is the index of the constraint that will be selected.
* @author Richard Smith
*
*/
public class DisplayConstraintOption
{
private String label;
private Integer property;
/**
* Construct with the constraint lable and index
* @param label the value to be shown in dropdown
* @param property the constraint index to be added to form on selection
*/
public DisplayConstraintOption(String label, Integer property) {
this.label = label;
this.property = property;
}
/**
* Get the value to be displayed in the dropdown for this operation.
* @return the display value
*/
public String getLabel() {
return label;
}
/**
* Get the constraint index to be put in form when this op is selected.
* @return the constraint index
*/
public Integer getProperty() {
return property;
}
}
}
| checkstyle
| intermine/web/main/src/org/intermine/web/logic/query/DisplayConstraint.java | checkstyle |
|
Java | lgpl-2.1 | cbe08aeff9c1f5efb108402d9e0f6c78f8175e55 | 0 | CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine | /*
* jETeL/CloverETL - Java based ETL application framework.
* Copyright (c) Javlin, a.s. ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.interpreter;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetel.data.DataField;
import org.jetel.data.DataRecord;
import org.jetel.data.NullRecord;
import org.jetel.data.lookup.Lookup;
import org.jetel.data.primitive.CloverInteger;
import org.jetel.data.primitive.DecimalFactory;
import org.jetel.exception.BadDataFormatException;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.graph.TransformationGraph;
import org.jetel.graph.dictionary.Dictionary;
import org.jetel.graph.dictionary.StringDictionaryType;
import org.jetel.interpreter.ASTnode.CLVFAddNode;
import org.jetel.interpreter.ASTnode.CLVFAnd;
import org.jetel.interpreter.ASTnode.CLVFAssignment;
import org.jetel.interpreter.ASTnode.CLVFBlock;
import org.jetel.interpreter.ASTnode.CLVFBreakStatement;
import org.jetel.interpreter.ASTnode.CLVFBreakpointNode;
import org.jetel.interpreter.ASTnode.CLVFCaseExpression;
import org.jetel.interpreter.ASTnode.CLVFComparison;
import org.jetel.interpreter.ASTnode.CLVFContinueStatement;
import org.jetel.interpreter.ASTnode.CLVFDictionaryNode;
import org.jetel.interpreter.ASTnode.CLVFDirectMapping;
import org.jetel.interpreter.ASTnode.CLVFDivNode;
import org.jetel.interpreter.ASTnode.CLVFDoStatement;
import org.jetel.interpreter.ASTnode.CLVFEvalNode;
import org.jetel.interpreter.ASTnode.CLVFForStatement;
import org.jetel.interpreter.ASTnode.CLVFForeachStatement;
import org.jetel.interpreter.ASTnode.CLVFFunctionCallStatement;
import org.jetel.interpreter.ASTnode.CLVFFunctionDeclaration;
import org.jetel.interpreter.ASTnode.CLVFIfStatement;
import org.jetel.interpreter.ASTnode.CLVFIffNode;
import org.jetel.interpreter.ASTnode.CLVFImportSource;
import org.jetel.interpreter.ASTnode.CLVFIncrDecrStatement;
import org.jetel.interpreter.ASTnode.CLVFInputFieldLiteral;
import org.jetel.interpreter.ASTnode.CLVFIsNullNode;
import org.jetel.interpreter.ASTnode.CLVFListOfLiterals;
import org.jetel.interpreter.ASTnode.CLVFLiteral;
import org.jetel.interpreter.ASTnode.CLVFLookupNode;
import org.jetel.interpreter.ASTnode.CLVFMinusNode;
import org.jetel.interpreter.ASTnode.CLVFModNode;
import org.jetel.interpreter.ASTnode.CLVFMulNode;
import org.jetel.interpreter.ASTnode.CLVFNVL2Node;
import org.jetel.interpreter.ASTnode.CLVFNVLNode;
import org.jetel.interpreter.ASTnode.CLVFOperator;
import org.jetel.interpreter.ASTnode.CLVFOr;
import org.jetel.interpreter.ASTnode.CLVFOutputFieldLiteral;
import org.jetel.interpreter.ASTnode.CLVFPostfixExpression;
import org.jetel.interpreter.ASTnode.CLVFPrintErrNode;
import org.jetel.interpreter.ASTnode.CLVFPrintLogNode;
import org.jetel.interpreter.ASTnode.CLVFPrintStackNode;
import org.jetel.interpreter.ASTnode.CLVFRaiseErrorNode;
import org.jetel.interpreter.ASTnode.CLVFRegexLiteral;
import org.jetel.interpreter.ASTnode.CLVFReturnStatement;
import org.jetel.interpreter.ASTnode.CLVFSequenceNode;
import org.jetel.interpreter.ASTnode.CLVFStart;
import org.jetel.interpreter.ASTnode.CLVFStartExpression;
import org.jetel.interpreter.ASTnode.CLVFStatementExpression;
import org.jetel.interpreter.ASTnode.CLVFSubNode;
import org.jetel.interpreter.ASTnode.CLVFSwitchStatement;
import org.jetel.interpreter.ASTnode.CLVFSymbolNameExp;
import org.jetel.interpreter.ASTnode.CLVFTryCatchStatement;
import org.jetel.interpreter.ASTnode.CLVFUnaryExpression;
import org.jetel.interpreter.ASTnode.CLVFVarDeclaration;
import org.jetel.interpreter.ASTnode.CLVFVariableLiteral;
import org.jetel.interpreter.ASTnode.CLVFWhileStatement;
import org.jetel.interpreter.ASTnode.CLVFWildCardMapping;
import org.jetel.interpreter.ASTnode.Node;
import org.jetel.interpreter.ASTnode.SimpleNode;
import org.jetel.interpreter.data.TLBooleanValue;
import org.jetel.interpreter.data.TLByteArrayValue;
import org.jetel.interpreter.data.TLContainerValue;
import org.jetel.interpreter.data.TLDateValue;
import org.jetel.interpreter.data.TLListValue;
import org.jetel.interpreter.data.TLMapValue;
import org.jetel.interpreter.data.TLNullValue;
import org.jetel.interpreter.data.TLNumericValue;
import org.jetel.interpreter.data.TLObjectValue;
import org.jetel.interpreter.data.TLRecordValue;
import org.jetel.interpreter.data.TLStringValue;
import org.jetel.interpreter.data.TLValue;
import org.jetel.interpreter.data.TLValueType;
import org.jetel.interpreter.data.TLVariable;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.metadata.DataRecordMetadata;
import org.jetel.util.string.CharSequenceReader;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
/**
 * Executor of the FilterExpression parse tree: walks the AST produced by the
 * transformation-language parser and evaluates it against input/output records.
 *
 * @author dpavlis
 * @since 16.9.2004
 */
public class TransformLangExecutor implements TransformLangParserVisitor,
TransformLangParserConstants{
    // Reason codes stored in breakType when breakFlag is raised by a
    // break/continue/return statement during evaluation.
    public static final int BREAK_BREAK=1;
    public static final int BREAK_CONTINUE=2;
    public static final int BREAK_RETURN=3;
    // Evaluation stack holding intermediate TLValues and variable frames.
    protected Stack stack;
    // Set when control flow is being unwound; breakType holds one of the
    // BREAK_* codes above while the flag is set.
    protected boolean breakFlag;
    protected int breakType;
    // Global parameters which may be referenced from the transformation code.
    protected Properties globalParameters;
    // Records whose fields are resolved by input/output field literals.
    protected DataRecord[] inputRecords;
    protected DataRecord[] outputRecords;
    protected Node emptyNode; // used as replacement for empty statements
    // Transformation graph providing sequences, lookups and the dictionary.
    protected TransformationGraph graph;
    // Logger available to the executed script (print_log statements).
    protected Log runtimeLogger;
    // Parser used to compile code passed to eval() at runtime.
    protected ExpParser parser;
    static Log logger = LogFactory.getLog(TransformLangExecutor.class);
    // Cache of lookup-table accessors keyed by lookup name.
    Map<String, Lookup> lookups = new HashMap<String, Lookup>();
    /**
     * Create an executor with the given global parameters.
     * Initialises an empty evaluation stack and the shared placeholder node
     * used in place of empty statements.
     *
     * @param globalParameters parameters referenced from the transformation code (may be null)
     */
    public TransformLangExecutor(Properties globalParameters) {
        stack = new Stack();
        breakFlag = false;
        this.globalParameters=globalParameters;
        emptyNode = new SimpleNode(Integer.MAX_VALUE);
    }
    /**
     * Create an executor with no global parameters.
     */
    public TransformLangExecutor() {
        this(null);
    }
    /**
     * @return the transformation graph this executor operates in (may be null)
     */
    public TransformationGraph getGraph() {
        return graph;
    }
    /**
     * @param graph the transformation graph providing sequences, lookup tables etc.
     */
    public void setGraph(TransformationGraph graph) {
        this.graph = graph;
    }
    /**
     * @return the logger exposed to the executed script
     */
    public Log getRuntimeLogger() {
        return runtimeLogger;
    }
    /**
     * @param runtimeLogger the logger exposed to the executed script
     */
    public void setRuntimeLogger(Log runtimeLogger) {
        this.runtimeLogger = runtimeLogger;
    }
/**
* Set input data records for processing.<br>
* Referenced input data fields will be resolved from
* these data records.
*
* @param inputRecords array of input data records carrying values
*/
@SuppressWarnings(value="EI2")
public void setInputRecords(DataRecord[] inputRecords){
this.inputRecords=inputRecords;
for (DataRecord record : this.inputRecords) {
if (record == null) record = NullRecord.NULL_RECORD;
}
}
    /**
     * Set output data records for processing.<br>
     * Referenced output data fields will be resolved from
     * these data records - assignment (in code) to output data field
     * will result in assignment to one of these data records.
     *
     * @param outputRecords array of output data records for setting values
     */
    @SuppressWarnings(value="EI2")
    public void setOutputRecords(DataRecord[] outputRecords){
        this.outputRecords=outputRecords;
    }
    /**
     * Set global parameters which may be reference from within the
     * transformation source code
     *
     * @param parameters
     */
    public void setGlobalParameters(Properties parameters){
        this.globalParameters=parameters;
    }
    /**
     * Allows to store parameter/value on stack from
     * where it can be read by executed script/function.
     * The value is wrapped as a TLStringValue.
     * @param obj Object/value to be stored
     * @since 10.12.2006
     */
    public void setParameter(String obj){
        stack.push(new TLStringValue(obj));
    }
    /**
     * Method which returns result of executing parse tree.<br>
     * Basically, it returns whatever object was left on top of executor's
     * stack (usually as a result of last executed expression/operation).<br>
     * It can be called repetitively in order to read all objects from stack.
     *
     * @return Object saved on stack or NULL if no more objects are available
     */
    public TLValue getResult() {
        return stack.pop();
    }
    /**
     * Return value of globally defined variable determined by slot number.
     * Slot can be obtained by calling <code>TransformLangParser.getGlobalVariableSlot(<i>varname</i>)</code>
     *
     * @param varSlot
     * @return Object - depending of Global variable type
     * @since 6.12.2006
     */
    public TLVariable getGlobalVariable(int varSlot){
        return stack.getGlobalVar(varSlot);
    }
    /**
     * Allows to set value of defined global variable.
     *
     * @param varSlot slot number of the global variable
     * @param value value to store
     * @since 6.12.2006
     */
    public void setGlobalVariable(int varSlot,TLVariable value){
        stack.storeGlobalVar(varSlot,value);
    }
    /**
     * Allows to set parser which may be used in "eval" to compile code at runtime.
     *
     * @param parser
     */
    public void setParser(ExpParser parser){
        this.parser=parser;
    }
/* *********************************************************** */
/* implementation of visit methods for each class of AST node */
/* *********************************************************** */
/* it seems to be necessary to define a visit() method for SimpleNode */
public Object visit(SimpleNode node, Object data) {
// throw new TransformLangExecutorRuntimeException(node,
// "Error: Call to visit for SimpleNode");
return data;
}
public Object visit(CLVFStart node, Object data) {
int i, k = node.jjtGetNumChildren();
for (i = 0; i < k; i++)
node.jjtGetChild(i).jjtAccept(this, data);
return data; // this value is ignored in this example
}
public Object visit(CLVFStartExpression node, Object data) {
int i, k = node.jjtGetNumChildren();
for (i = 0; i < k; i++)
node.jjtGetChild(i).jjtAccept(this, data);
return data; // this value is ignored in this example
}
    /**
     * Logical OR with short-circuit evaluation: the right operand is only
     * evaluated when the left one is false. Both operands must evaluate to
     * BOOLEAN; the result is pushed onto the evaluation stack.
     */
    public Object visit(CLVFOr node, Object data) {
        node.jjtGetChild(0).jjtAccept(this, data);
        TLValue a=stack.pop();
        if (a.type!=TLValueType.BOOLEAN){
            Object params[]=new Object[]{a};
            throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
        }else if (a==TLBooleanValue.TRUE){
            // short-circuit: left operand true, skip right operand entirely
            stack.push(TLBooleanValue.TRUE);
            return data;
        }
        node.jjtGetChild(1).jjtAccept(this, data);
        a=stack.pop();
        if (a.type!=TLValueType.BOOLEAN){
            Object params[]=new Object[]{a};
            throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
        }
        // left was false, so the right operand alone decides the result
        stack.push( a==TLBooleanValue.TRUE ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
        return data;
    }
    /**
     * Logical AND with short-circuit evaluation: the right operand is only
     * evaluated when the left one is true. Both operands must evaluate to
     * BOOLEAN; the result is pushed onto the evaluation stack.
     */
    public Object visit(CLVFAnd node, Object data) {
        node.jjtGetChild(0).jjtAccept(this, data);
        TLValue a=stack.pop();
        if (a.type!=TLValueType.BOOLEAN){
            Object params[]=new Object[]{a};
            throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
        }else if (a==TLBooleanValue.FALSE){
            // short-circuit: left operand false, skip right operand entirely
            stack.push(TLBooleanValue.FALSE);
            return data;
        }
        node.jjtGetChild(1).jjtAccept(this, data);
        a=stack.pop();
        if (a.type!=TLValueType.BOOLEAN){
            Object params[]=new Object[]{a};
            throw new TransformLangExecutorRuntimeException(node,params,"logical condition does not evaluate to BOOLEAN value");
        }
        // left was true, so the right operand alone decides the result
        stack.push(a==TLBooleanValue.TRUE ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
        return data;
    }
    /**
     * Evaluate a comparison node and push the boolean result onto the stack.
     * Three operator families are handled: regular-expression match (~=),
     * container membership (in) and the ordinary relational operators, which
     * delegate to TLValue.compareTo for comparable types. Comparing a NULL
     * left operand with EQUAL or IN yields false rather than an error.
     */
    public Object visit(CLVFComparison node, Object data) {
        int cmpResult = 2;
        boolean lValue = false;
        TLValue a;
        TLValue b;
        switch(node.cmpType){
        case REGEX_EQUAL:
            // special handling for Regular expression: left operand must be a
            // STRING, right operand a precompiled Matcher
            node.jjtGetChild(0).jjtAccept(this, data);
            TLValue field1 = stack.pop();
            node.jjtGetChild(1).jjtAccept(this, data);
            TLValue field2 = stack.pop();
            if (field1.type == TLValueType.STRING
                    && field2.getValue() instanceof Matcher) {
                Matcher regex = (Matcher) field2.getValue();
                regex.reset(((TLStringValue)field1).getCharSequence());
                if (regex.matches()) {
                    lValue = true;
                } else {
                    lValue = false;
                }
            } else {
                Object[] arguments = { field1, field2 };
                throw new TransformLangExecutorRuntimeException(node,
                        arguments, "regex equal - wrong type of literal(s)");
            }
            break;
        case IN_OPER:
            // membership test: right operand must be a container value
            TLContainerValue list=null;
            node.jjtGetChild(0).jjtAccept(this, data);
            a = stack.pop();
            node.jjtGetChild(1).jjtAccept(this, data);
            b = stack.pop();
            try{
                list = (TLContainerValue)b;
            }catch(Exception ex){
                Object[] arguments = { a, b};
                throw new TransformLangExecutorRuntimeException(node,
                        arguments, "in - wrong type of literal(s)");
            }
            // SPECIAL handling of IN in case a is NULL: NULL is never a member
            if (a==TLNullValue.getInstance()){
                stack.push(TLBooleanValue.FALSE);
                return data;
            }
            try{
                lValue=list.contains(a);
            }catch(Exception ex){
                Object[] arguments = { a, b };
                throw new TransformLangExecutorRuntimeException(node,
                        arguments, "in - incompatible literals/expressions");
            }
            break;
        default:
            // ordinary relational comparison of two operands
            node.jjtGetChild(0).jjtAccept(this, data);
            a = stack.pop();
            node.jjtGetChild(1).jjtAccept(this, data);
            b = stack.pop();
            if (!a.type.isCompatible(b.type)) {
                // SPECIAL handling of EQUAL in case a is NULL: NULL == x is false
                if (a==TLNullValue.getInstance() && node.cmpType==EQUAL){
                    stack.push(TLBooleanValue.FALSE);
                    return data;
                }
                Object arguments[] = { a, b };
                throw new TransformLangExecutorRuntimeException(node,
                        arguments,
                        "compare - incompatible literals/expressions");
            }
            // First compute cmpResult (-1/0/1) for comparable types...
            switch (a.type) {
            case INTEGER:
            case LONG:
            case NUMBER:
            case DECIMAL:
            case DATE:
            case STRING:
            case LIST:
            case MAP:
            case RECORD:
                try{
                    cmpResult = a.compareTo(b);
                }catch(Exception ex){
                    Object arguments[] = { a, b };
                    throw new TransformLangExecutorRuntimeException(node,
                            arguments,
                            "compare - error during comparison of literals/expressions");
                }
                break;
            case BOOLEAN:
                // booleans only support (in)equality
                if (node.cmpType == EQUAL || node.cmpType == NON_EQUAL) {
                    cmpResult = a.equals(b) ? 0 : -1;
                } else {
                    Object arguments[] = { a, b };
                    throw new TransformLangExecutorRuntimeException(node,
                            arguments,
                            "compare - unsupported comparison operator ["
                                    + tokenImage[node.cmpType]
                                    + "] for literals/expressions");
                }
                break;
            default:
                Object arguments[] = { a, b };
                throw new TransformLangExecutorRuntimeException(node,
                        arguments,
                        "compare - don't know how to compare literals/expressions");
            }
            // ...then map cmpResult to a boolean according to the operator
            switch (node.cmpType) {
            case EQUAL:
                if (cmpResult == 0) {
                    lValue = true;
                }
                break;// equal
            case LESS_THAN:
                if (cmpResult == -1) {
                    lValue = true;
                }
                break;// less than
            case GREATER_THAN:
                if (cmpResult == 1) {
                    lValue = true;
                }
                break;// greater than
            case LESS_THAN_EQUAL:
                if (cmpResult <= 0) {
                    lValue = true;
                }
                break;// less than equal
            case GREATER_THAN_EQUAL:
                if (cmpResult >= 0) {
                    lValue = true;
                }
                break;// greater than equal
            case NON_EQUAL:
                if (cmpResult != 0) {
                    lValue = true;
                }
                break;
            default:
                // this should never happen !!!
                logger
                        .fatal("Internal error: Unsupported comparison operator !");
                throw new RuntimeException(
                        "Internal error - Unsupported comparison operator !");
            }
        }
        stack.push(lValue ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
        return data;
    }
        /**
         * Evaluates the binary '+' operator.
         * Semantics by operand type:
         *  - numeric + numeric -> numeric addition
         *  - date + numeric    -> date shifted forward by that many days
         *  - string + anything -> string concatenation
         * The result holder is cached in node.nodeVal and reused on subsequent
         * visits; it is seeded (duplicated) from the first non-NULL operand, so
         * its type is fixed by the first evaluation.
         */
        public Object visit(CLVFAddNode node, Object data) {
                node.jjtGetChild(0).jjtAccept(this, data);
                TLValue a = stack.pop();
                node.jjtGetChild(1).jjtAccept(this, data);
                TLValue b = stack.pop();

                // lazily create the cached result value from whichever operand is non-NULL
                if (node.nodeVal==null) {
                        if (a!=TLNullValue.getInstance()){
                                node.nodeVal=a.duplicate();
                        }else if (b!=TLNullValue.getInstance()){
                                node.nodeVal=b.duplicate();
                        }else{
                                throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                                                "add - NULL values not allowed");
                        }
                }
                try {
                        if (a.type.isNumeric() && b.type.isNumeric()) {
                                node.nodeVal.setValue(a);
                                ((TLNumericValue)node.nodeVal).add(((TLNumericValue)b).getNumeric());
                                stack.push(node.nodeVal);
                        } else if (a.type==TLValueType.DATE && b.type.isNumeric()) {
                                // date + N days
                                Calendar result = Calendar.getInstance();
                                result.setTime(((TLDateValue)a).getDate());
                                result.add(Calendar.DATE, ((TLNumericValue)b).getInt());
                                ((TLDateValue)node.nodeVal).getDate().setTime(result.getTimeInMillis());
                                stack.push(node.nodeVal);
                        } else if (a.type==TLValueType.STRING) {
                                //CharSequence a1 = ((TLStringValue)a).getCharSequence();
                                // concatenation reuses the StringBuilder held by the cached value
                                StringBuilder buf=(StringBuilder)node.nodeVal.getValue();
                                buf.setLength(0);
                                buf.append(a.getValue());
                                if (b.type==TLValueType.STRING) {
                                        buf.append(b.getValue());
                                } else {
                                        // non-string right operand is appended via its toString()
                                        buf.append(b);
                                }
                                stack.push(node.nodeVal);
                        } else {
                                Object[] arguments = { a, b };
                                throw new TransformLangExecutorRuntimeException(node,arguments,
                                                "add - wrong type of literal(s)");
                        }
                } catch (ClassCastException ex) {
                        // cast failures above mean the operand combination is unsupported
                        Object arguments[] = { a, b };
                        throw new TransformLangExecutorRuntimeException(node,arguments,
                                        "add - wrong type of literal(s)");
                }

                return data;
        }
        /**
         * Evaluates the binary '-' operator.
         * Semantics: numeric - numeric, or date - N (shifts the date back N days).
         * The right operand must always be numeric; NULL operands are rejected.
         * The result holder is cached in node.nodeVal (duplicated from the left
         * operand on first evaluation).
         *
         * NOTE(review): unlike the div visitor, the cached nodeVal is not
         * re-created when its type differs from a.type on a later visit —
         * presumably the left operand's type is stable across evaluations;
         * confirm against the parser's type checking.
         */
        public Object visit(CLVFSubNode node, Object data) {
                node.jjtGetChild(0).jjtAccept(this, data);
                TLValue a = stack.pop();
                node.jjtGetChild(1).jjtAccept(this, data);
                TLValue b = stack.pop();

                if (a==TLNullValue.getInstance() || b==TLNullValue.getInstance()) {
                        throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                                        "sub - NULL value not allowed");
                }

                // the subtrahend must be numeric in every supported combination
                if (!b.type.isNumeric()) {
                        throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
                                        "sub - wrong type of literal");
                }

                if (node.nodeVal==null) {
                        node.nodeVal=a.duplicate();
                }

                if(a.type.isNumeric()) {
                        node.nodeVal.setValue(a);
                        ((TLNumericValue)node.nodeVal).sub(((TLNumericValue)b).getNumeric());
                        stack.push(node.nodeVal);
                } else if (a.type==TLValueType.DATE) {
                        // date - N days (implemented as adding -N)
                        Calendar result = Calendar.getInstance();
                        result.setTime(((TLDateValue)a).getDate());
                        result.add(Calendar.DATE, ((TLNumericValue)b).getInt() * -1);
                        ((TLDateValue)node.nodeVal).getDate().setTime(result.getTimeInMillis());
                        stack.push(node.nodeVal);
                } else {
                        Object[] arguments = { a, b };
                        throw new TransformLangExecutorRuntimeException(node,arguments,
                                        "sub - wrong type of literal(s)");
                }
                return data;
        }
public Object visit(CLVFMulNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
TLValue b = stack.pop();
if (a==TLNullValue.getInstance()|| b==TLNullValue.getInstance()) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"mul - NULL value not allowed");
}
if (!a.type.isNumeric() && !b.type.isNumeric()) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
"mul - wrong type of literals");
}
if (node.nodeVal==null) {
node.nodeVal=a.duplicate();
}else{
node.nodeVal.setValue(a);
}
((TLNumericValue)node.nodeVal).mul(((TLNumericValue)b).getNumeric()); //TODO: hack due to IntegerDecimal problem..
stack.push(node.nodeVal);
return data;
}
public Object visit(CLVFDivNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
TLValue b = stack.pop();
if (a==TLNullValue.getInstance()|| b==TLNullValue.getInstance()) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"div - NULL value not allowed");
}
if (!a.type.isNumeric() && !b.type.isNumeric()) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
"div - wrong type of literals");
}
if (node.nodeVal==null || node.nodeVal.type!=a.type) {
node.nodeVal=a.duplicate();
}else{
node.nodeVal.setValue(a);
}
try {
((TLNumericValue)node.nodeVal).div(((TLNumericValue)b).getNumeric()); //TODO: hack due to IntegerDecimal problem.
}catch(ArithmeticException ex){
throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
"div - arithmetic exception",ex);
}catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
"div - error during operation",ex);
}
stack.push(node.nodeVal);
return data;
}
public Object visit(CLVFModNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue a = stack.pop();
node.jjtGetChild(1).jjtAccept(this, data);
TLValue b = stack.pop();
if (a==TLNullValue.getInstance()|| b==TLNullValue.getInstance()) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"mod - NULL value not allowed");
}
if (!a.type.isNumeric() && !b.type.isNumeric()) {
throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
"mod - wrong type of literals");
}
if (node.nodeVal==null) {
node.nodeVal=a.duplicate();
}else{
node.nodeVal.setValue(a);
}
((TLNumericValue)node.nodeVal).mod(((TLNumericValue)b).getNumeric()); //TODO: hack due to IntegerDecimal problem.
stack.push(node.nodeVal);
return data;
}
public Object visit(CLVFIsNullNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue value = stack.pop();
if (value==TLNullValue.getInstance()) {
stack.push(TLBooleanValue.TRUE);
} else {
if (value.type==TLValueType.STRING) {
stack.push( ((TLStringValue)value).getCharSequence().length()==0 ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
}else {
stack.push(TLBooleanValue.FALSE);
}
}
return data;
}
public Object visit(CLVFNVLNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue value = stack.pop();
if (value==TLNullValue.getInstance()) {
node.jjtGetChild(1).jjtAccept(this, data);
} else {
if (value.type==TLValueType.STRING && ((TLStringValue)value).length()==0) {
node.jjtGetChild(1).jjtAccept(this, data);
}else {
stack.push(value);
}
}
return data;
}
public Object visit(CLVFNVL2Node node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue value = stack.pop();
if (value==TLNullValue.getInstance() || (value.type==TLValueType.STRING && ((TLStringValue)value).length()==0)) {
node.jjtGetChild(2).jjtAccept(this, data);
} else {
node.jjtGetChild(1).jjtAccept(this, data);
}
return data;
}
        /**
         * Evaluates a constant literal: pushes the value pre-parsed into the AST
         * node onto the interpreter stack.
         */
        public Object visit(CLVFLiteral node, Object data) {
                stack.push(node.value);
                return data;
        }
        /**
         * Evaluates a reference to an input record or one of its fields
         * ($record, $record.field, $record[index]).
         *
         * Pushes the referenced value (or the NULL singleton for missing
         * records/null fields) onto the stack. Returns the underlying DataRecord
         * or DataField when available so callers can perform extra checks;
         * returns null when NULL was pushed.
         *
         * The converted TLValue is cached in node.value and reused across visits.
         */
        public Object visit(CLVFInputFieldLiteral node, Object data) {
                if (inputRecords == null) {
                        throw new TransformLangExecutorRuntimeException(node, "Cannot access input fields within this scope!");
                }

                DataRecord record = inputRecords[node.recordNo];
                int fieldNo=-1;
                // a missing/NULL input record resolves to the NULL value
                if (record == NullRecord.NULL_RECORD || record == null) {
                        stack.push(TLNullValue.getInstance());
                        return null;
                }

                // dynamic field index: evaluate the index expression ($record[expr])
                if (node.indexSet){
                        node.childrenAccept(this, data);
                        TLValue val=stack.pop();
                        try{
                                fieldNo=val.getNumeric().getInt();
                        }catch(Exception ex){
                                throw new TransformLangExecutorRuntimeException(node,new Object[] {val},"invalid field index");
                        }
                }
                if (node.fieldNo < 0) { // record context
                        if (node.value == null) {
                                if (node.indexSet){
                                        try{
                                                node.value = TLValue.convertValue(record.getField(fieldNo));
                                        }catch(Exception ex){
                                                throw new TransformLangExecutorRuntimeException(node, "field index ("+fieldNo +") out of bounds");
                                        }
                                }else{
                                        // whole-record reference
                                        node.value = new TLRecordValue(record);
                                }
                        } else {
                                if (node.indexSet){
                                        try{
                                                node.value = TLValue.convertValue(record.getField(fieldNo));
                                        }catch(Exception ex){
                                                throw new TransformLangExecutorRuntimeException(node, "field index ("+fieldNo +") out of bounds");
                                        }
                                }else{
                                        // refresh the cached record value in place
                                        node.value.setValue(record);
                                }
                        }

                        stack.push(node.value);

                        // we return reference to DataRecord so we can
                        // perform extra checking in special cases
                        return record;

                } else { // statically resolved field reference
                        node.field = record.getField(node.fieldNo);
                        if (node.field.isNull()) {
                                stack.push(TLNullValue.getInstance());
                                return null;
                        }

                        if (node.value == null || node.field.getMetadata().getType() == DataFieldMetadata.BOOLEAN_FIELD) {
                                // since TLBooleanValue is immutable, we have to pass correct reference
                                node.value = TLValue.convertValue(node.field);
                        } else {
                                node.value.setValue(node.field);
                        }

                        stack.push(node.value);

                        // we return reference to DataField so we can
                        // perform extra checking in special cases
                        return node.field;
                }
        }
        /**
         * Evaluates a reference to an output field. Intentionally a no-op at
         * evaluation time: output fields are only written to (see the mapping
         * visitors), never read.
         */
        public Object visit(CLVFOutputFieldLiteral node, Object data) {
                //stack.push(inputRecords[node.recordNo].getField(node.fieldNo));
                // we return reference to DataField so we can
                // perform extra checking in special cases
                return data;
        }
        /**
         * Evaluates a regular-expression literal: wraps the pre-compiled matcher
         * stored on the AST node and pushes it onto the stack.
         */
        public Object visit(CLVFRegexLiteral node, Object data) {
                stack.push(new TLObjectValue(node.matcher));
                return data;
        }
public Object visit(CLVFMinusNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue value = stack.pop();
if (value.type.isNumeric()) {
TLNumericValue newVal=(TLNumericValue)value.duplicate();
newVal.getNumeric().mul(Stack.NUM_MINUS_ONE_P);
stack.push(newVal);
} else {
Object arguments[] = { value };
throw new TransformLangExecutorRuntimeException(node,arguments,
"minus - not a number");
}
return data;
}
public Object visit(CLVFIffNode node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
TLValue condition = stack.pop();
if (condition.type==TLValueType.BOOLEAN) {
if (condition==TLBooleanValue.TRUE) {
node.jjtGetChild(1).jjtAccept(this, data);
} else {
node.jjtGetChild(2).jjtAccept(this, data);
}
stack.push(stack.pop());
} else {
Object[] arguments = { condition };
throw new TransformLangExecutorRuntimeException(node,arguments,
"iif - condition does not evaluate to BOOLEAN value");
}
return data;
}
        /**
         * Evaluates print_err(message [, printLocation]): writes the message to
         * standard error, optionally suffixed with the source line/column of the
         * statement.
         *
         * All children are evaluated first; because the stack is LIFO, the
         * optional boolean flag (last child) is popped before the message.
         */
        public Object visit(CLVFPrintErrNode node, Object data) {
                node.childrenAccept(this, data);

                boolean printLocationFlag = false;
                // interpret optional parameter
                if (node.jjtGetNumChildren() > 1) {
                        TLValue printLocation = stack.pop();
                        if (printLocation.type != TLValueType.BOOLEAN) {
                                throw new TransformLangExecutorRuntimeException(node,new Object[]{printLocation},
                                        "print_err - the second argument does not evaluate to a BOOLEAN value");
                        }
                        printLocationFlag = (Boolean)printLocation.getValue();
                }

                TLValue message = stack.pop();
                if (printLocationFlag) {
                        // append "(on line: L col: C)" to help locate the statement
                        StringBuilder buf=new StringBuilder((message != null ? message.toString() : "<null>"));
                        buf.append(" (on line: ").append(node.getLineNumber());
                        buf.append(" col: ").append(node.getColumnNumber()).append(")");
                        System.err.println(buf);
                }else{
                        System.err.println(message != null ? message : "<null>");
                }
                return data;
        }
public Object visit(CLVFPrintStackNode node, Object data) {
for (int i=stack.top;i>=0;i--){
System.err.println("["+i+"] : "+stack.stack[i]);
}
System.out.println("** list of local variables ***");
for (int i=0;i<stack.localVarCounter;i++)
System.out.println(stack.localVarSlot[stack.localVarSlotOffset+i]);
return data;
}
/***************************************************************************
* Transformation Language executor starts here.
**************************************************************************/
        /**
         * Executes a C-style for loop.
         * Children: 0 = init, 1 = condition, 2 = increment, 3 = body (optional).
         * The loop condition must evaluate to BOOLEAN. break/return terminate
         * the loop via the breakFlag/breakType mechanism; continue only skips
         * the rest of the body.
         */
        public Object visit(CLVFForStatement node, Object data) {
                node.jjtGetChild(0).jjtAccept(this, data); // set up of the loop
                boolean condition = false;
                Node loopCondition = node.jjtGetChild(1);
                Node increment = node.jjtGetChild(2);
                Node body;

                // a for loop may legally have no body
                try{
                        body=node.jjtGetChild(3);
                }catch(ArrayIndexOutOfBoundsException ex){
                        body=emptyNode;
                }

                loopCondition.jjtAccept(this, data); // evaluate the condition
                TLValue conVal=stack.pop();
                try{
                        if (conVal.type!=TLValueType.BOOLEAN)
                                throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
                        condition = (conVal==TLBooleanValue.TRUE);
                }catch (NullPointerException ex){
                        throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
                }

                // loop execution
                while (condition) {
                        body.jjtAccept(this, data);
                        stack.pop(); // in case there is anything on top of stack
                        // check for break or continue statements
                        if (breakFlag){
                                breakFlag=false;
                                if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
                                        return data;
                                }
                        }
                        increment.jjtAccept(this, data);
                        stack.pop(); // in case there is anything on top of stack
                        // evaluate the condition
                        loopCondition.jjtAccept(this, data);
                        condition = (stack.pop()==TLBooleanValue.TRUE);
                }

                return data;
        }
        /**
         * Executes a foreach loop over a container variable.
         * Children: 0 = loop variable, 1 = container variable, 2 = body (optional).
         * Supported containers: LIST, RECORD, BYTE (iterated by index) and MAP
         * (iterated over its value collection). The loop variable is assigned
         * each element in turn before the body runs. break/return terminate the
         * loop; continue only skips the remainder of the body.
         */
        public Object visit(CLVFForeachStatement node, Object data) {
                CLVFVariableLiteral varNode=(CLVFVariableLiteral)node.jjtGetChild(0);
                CLVFVariableLiteral arrayNode=(CLVFVariableLiteral)node.jjtGetChild(1);
                TLVariable variableToAssign = stack.getVar(varNode.localVar,
                varNode.varSlot);
                TLVariable arrayVariable=stack.getVar(arrayNode.localVar,
                arrayNode.varSlot);
                Node body;

                // a foreach loop may legally have no body
                try{
                        body=node.jjtGetChild(2);
                }catch(ArrayIndexOutOfBoundsException ex){
                        body=emptyNode;
                }
                switch(arrayVariable.getType()) {
                case LIST:
                case RECORD:
                case BYTE:
                        // index-addressable containers: iterate positions 0..length-1
                        TLContainerValue container=(TLContainerValue)arrayVariable.getTLValue();
                        for(int i=0; i<container.getLength();i++) {
                                variableToAssign.setTLValue(container.getStoredValue(i));
                                body.jjtAccept(this, data);
                                stack.pop(); // in case there is anything on top of stack
                                // check for break or continue statements
                                if (breakFlag){
                                        breakFlag=false;
                                        if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
                                                return data;
                                        }
                                }
                        }
                        break;
                case MAP:
                        // maps: iterate over the stored values (keys are not exposed)
                        Iterator<TLValue> iter = ((TLContainerValue)arrayVariable.getTLValue()).getCollection().iterator();
                        while(iter.hasNext()) {
                                variableToAssign.setTLValue(iter.next());
                                body.jjtAccept(this, data);
                                stack.pop(); // in case there is anything on top of stack
                                // check for break or continue statements
                                if (breakFlag){
                                        breakFlag=false;
                                        if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
                                                return data;
                                        }
                                }
                        }
                        break;
                default:
                        throw new TransformLangExecutorRuntimeException(node,"not a Map/List/Record/ByteArray variable");
                }
                return data;
        }
        /**
         * Executes a while loop.
         * Children: 0 = condition (must evaluate to BOOLEAN), 1 = body (optional).
         * break/return terminate the loop; continue only skips the remainder of
         * the body. Note the boolean type is only validated on the first
         * evaluation of the condition.
         */
        public Object visit(CLVFWhileStatement node, Object data) {
                boolean condition = false;
                Node loopCondition = node.jjtGetChild(0);
                Node body;

                // a while loop may legally have no body
                try{
                        body=node.jjtGetChild(1);
                }catch(ArrayIndexOutOfBoundsException ex){
                        body=emptyNode;
                }

                loopCondition.jjtAccept(this, data); // evaluate the condition
                TLValue conVal=stack.pop();
                try{
                        if (conVal.type!=TLValueType.BOOLEAN)
                                throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
                        condition = (conVal==TLBooleanValue.TRUE);
                }catch (NullPointerException ex){
                        throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
                }

                // loop execution
                while (condition) {
                        body.jjtAccept(this, data);
                        stack.pop(); // in case there is anything on top of stack
                        // check for break or continue statements
                        if (breakFlag){
                                breakFlag=false;
                                if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
                        }
                        // evaluate the condition
                        loopCondition.jjtAccept(this, data);
                        condition = (stack.pop()==TLBooleanValue.TRUE);
                }

                return data;
        }
public Object visit(CLVFIfStatement node, Object data) {
boolean condition = false;
node.jjtGetChild(0).jjtAccept(this, data); // evaluate the
TLValue conVal=stack.pop();
try{
if (conVal.type!=TLValueType.BOOLEAN)
throw new TransformLangExecutorRuntimeException(node,"if condition does not evaluate to BOOLEAN value");
condition = (conVal==TLBooleanValue.TRUE);
} catch (NullPointerException ex){
throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
}
// first if
if (condition) {
node.jjtGetChild(1).jjtAccept(this, data);
} else { // if else part exists
if (node.jjtGetNumChildren() > 2) {
node.jjtGetChild(2).jjtAccept(this, data);
}
}
return data;
}
        /**
         * Executes a do-while loop.
         * Children: 0 = body, 1 = condition (must evaluate to BOOLEAN). The body
         * always runs at least once. break/return terminate the loop; continue
         * only skips the remainder of the body.
         */
        public Object visit(CLVFDoStatement node, Object data) {
                boolean condition = false;
                Node loopCondition = node.jjtGetChild(1);
                Node body = node.jjtGetChild(0);

                // loop execution
                do {
                        body.jjtAccept(this, data);
                        stack.pop(); // in case there is anything on top of stack
                        // check for break or continue statements
                        if (breakFlag){
                                breakFlag=false;
                                if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
                        }
                        // evaluate the condition
                        loopCondition.jjtAccept(this, data);
                        TLValue conVal=stack.pop();
                        try{
                                if (conVal.type!=TLValueType.BOOLEAN)
                                        throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
                                condition = (conVal==TLBooleanValue.TRUE);
                        }catch (NullPointerException ex){
                                throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
                        }
                } while (condition);

                return data;
        }
        /**
         * Executes a switch statement.
         * Child 0 is the switch expression; children 1..n are case expressions,
         * optionally followed by a default clause as the last child. The switch
         * value is re-pushed before each case (the case visitor pops it and
         * returns TLBooleanValue.TRUE on a match). Without a break, execution
         * falls through to subsequent cases; the default clause runs only when
         * no case matched.
         */
        public Object visit(CLVFSwitchStatement node, Object data) {
                // get value of switch && push/leave it on stack
                boolean match=false;
                node.jjtGetChild(0).jjtAccept(this, data);
                TLValue switchVal=stack.pop();
                int numChildren = node.jjtGetNumChildren();
                int numCases = node.hasDefaultClause ? numChildren-1 : numChildren;
                // loop over remaining case statements
                for (int i = 1; i < numCases; i++) {
                        stack.push(switchVal);
                        if (node.jjtGetChild(i).jjtAccept(this, data)==TLBooleanValue.TRUE){
                                match=true;
                        }
                        if (breakFlag) {
                                if (breakType == BREAK_BREAK) {
                                        breakFlag = false;
                                }
                                break;
                        }
                }
                // test whether execute default branch
                if (node.hasDefaultClause && !match){
                        node.jjtGetChild(numChildren-1).jjtAccept(this, data);
                }

                return data;
        }
        /**
         * Evaluates a single case clause of a switch statement.
         * Pops the switch value (pushed by the switch visitor), compares it with
         * the case literal (child 0), and on a match executes the case body
         * (child 1). Returns TLBooleanValue.TRUE/FALSE as the match indicator
         * consumed by the switch visitor.
         */
        public Object visit(CLVFCaseExpression node, Object data) {
                // test if literal (as child 0) is equal to data on stack
                // if so, execute block (child 1)
                boolean match = false;
                TLValue switchVal = stack.pop();
                node.jjtGetChild(0).jjtAccept(this, data);
                TLValue value = stack.pop();
                try {
                        match=(switchVal.compareTo(value)==0);
                } catch (ClassCastException ex) {
                        Object[] args=new Object[] {switchVal,value};
                        throw new TransformLangExecutorRuntimeException(node,args,"incompatible literals in case clause");
                }catch (NullPointerException ex){
                        throw new TransformLangExecutorRuntimeException(node,"missing or invalid case value");
                }catch (IllegalArgumentException ex){
                        // same handling as ClassCastException: the values cannot be compared
                        Object[] args=new Object[] {switchVal,value};
                        throw new TransformLangExecutorRuntimeException(node,args,"incompatible literals in case clause");
                }
                if (match){
                        node.jjtGetChild(1).jjtAccept(this, data);
                        return TLBooleanValue.TRUE;
                }
                return TLBooleanValue.FALSE;
        }
        /**
         * Executes a try/catch statement.
         * Children: 0 = guarded block, then either (1 = catch block) or
         * (1 = string variable receiving the exception class name, 2 = catch
         * block). Any exception thrown by the guarded block is swallowed and
         * control passes to the catch block.
         */
        public Object visit(CLVFTryCatchStatement node, Object data) {
                try {
                        node.jjtGetChild(0).jjtAccept(this, data); // guarded block
                } catch (Exception ex) {
                        if (node.jjtGetNumChildren() > 2) {
                                // populate chosen variable with exception name
                                CLVFVariableLiteral varLit = (CLVFVariableLiteral) node.jjtGetChild(1);
                                TLVariable var = stack.getVar(varLit.localVar, varLit.varSlot);

                                if (var.getType() != TLValueType.STRING) {
                                        throw new TransformLangExecutorRuntimeException(node, "variable \"" + var.getName() + "\" is not of type string in catch() block");
                                }
                                // prefer the root cause's class name when present
                                var.getTLValue().setValue(ex.getCause()== null ? ex.getClass().getName() : ex.getCause().getClass().getName());

                                // call the catch block when variable is present
                                node.jjtGetChild(2).jjtAccept(this, data);
                        } else {
                                // call the catch block - simple variant
                                node.jjtGetChild(1).jjtAccept(this, data);
                        }
                }

                return data;
        }
public Object visit(CLVFIncrDecrStatement node, Object data) {
Node childNode = node.jjtGetChild(0);
CLVFVariableLiteral varNode=(CLVFVariableLiteral) childNode;
TLVariable var=stack.getVar(varNode.localVar, varNode.varSlot);
if (var.getType().isNumeric()) {
((TLNumericValue)var.getTLValue()).getNumeric().add( node.kind==INCR ? Stack.NUM_ONE_P : Stack.NUM_MINUS_ONE_P);
}else if (var.getType()==TLValueType.DATE) {
stack.calendar.setTime(((TLDateValue)var.getTLValue()).getDate());
stack.calendar.add(Calendar.DATE, node.kind == INCR ? 1 : -1);
var.getTLValue().setValue(stack.calendar.getTime());
}else {
throw new TransformLangExecutorRuntimeException(node,"variable is not of numeric or date type");
}
return data;
}
public Object visit(CLVFBlock node, Object data) {
int childern = node.jjtGetNumChildren();
for (int i = 0; i < childern; i++) {
node.jjtGetChild(i).jjtAccept(this, data);
// have we seen contiue/break/return statement ??
if (breakFlag){
if (breakType!=BREAK_RETURN)
stack.pop();
return data;
}
stack.pop();
}
return data;
}
/*
* Loop & block & function control nodes
*/
        /**
         * Executes a break statement: raises the break flag so enclosing
         * loop/block visitors unwind up to the nearest loop.
         */
        public Object visit(CLVFBreakStatement node, Object data) {
                breakFlag = true; // we encountered break statement;
                breakType=BREAK_BREAK;
                return data;
        }
        /**
         * Executes a continue statement: raises the break flag so enclosing
         * visitors skip the remainder of the current loop iteration.
         */
        public Object visit(CLVFContinueStatement node, Object data) {
                breakFlag = true; // we encountered continue statement;
                breakType= BREAK_CONTINUE;
                return data;
        }
        /**
         * Executes a return statement: evaluates the optional return expression
         * (leaving its value on the stack) and raises the break flag so
         * enclosing visitors unwind out of the current function.
         */
        public Object visit(CLVFReturnStatement node, Object data) {
                if (node.jjtHasChildren()){
                        node.jjtGetChild(0).jjtAccept(this, data);
                }
                breakFlag = true;
                breakType = BREAK_RETURN;
                return data;
        }
public Object visit(CLVFBreakpointNode node, Object data) {
// list all variables
System.err.println("** list of global variables ***");
for (int i=0;i<stack.globalVarSlot.length;System.out.println(stack.globalVarSlot[i++]));
System.err.println("** list of local variables ***");
for (int i=0;i<stack.localVarCounter;i++)
System.out.println(stack.localVarSlot[stack.localVarSlotOffset+i]);
return data;
}
/*
* Variable declarations
*/
        /**
         * Executes a variable declaration: allocates a TLValue of the declared
         * type (honouring optional length/precision for decimals, byte arrays,
         * lists and maps), stores the new TLVariable into its stack slot, and —
         * when an initializer is present — evaluates it and assigns the result
         * after a type-compatibility check.
         */
        public Object visit(CLVFVarDeclaration node, Object data) {
                TLValue value=null;
                // create global/local variable
                switch (node.type) {
                case INT_VAR:
                        value = new TLNumericValue(TLValueType.INTEGER);
                        break;
                case LONG_VAR:
                        value = new TLNumericValue(TLValueType.LONG);
                        break;
                case DOUBLE_VAR:
                        value = new TLNumericValue(TLValueType.NUMBER);
                        break;
                case DECIMAL_VAR:
                {
                        // optional length/precision: decimal(length[, precision])
                        if (node.length > 0) {
                                if (node.precision > 0) {
                                        value = new TLNumericValue(TLValueType.DECIMAL, DecimalFactory.getDecimal(node.length,
                                                        node.precision));
                                } else {
                                        value = new TLNumericValue(TLValueType.DECIMAL,DecimalFactory.getDecimal(node.length, 0));
                                }
                        } else {
                                value = new TLNumericValue(TLValueType.DECIMAL,DecimalFactory.getDecimal());
                        }
                        // decimals start at zero rather than unset
                        ((TLNumericValue)value).getValue().setValue(0.0d);
                }
                        break;
                case STRING_VAR:
                        value = new TLStringValue();
                        break;
                case DATE_VAR:
                        value = new TLDateValue();
                        break;
                case BOOLEAN_VAR:
                        value = TLBooleanValue.getInstance(false);
                        break;
                case BYTE_VAR:
                {
                        // optional initial capacity
                        if (node.length>0) {
                                value = new TLByteArrayValue(node.length);
                        }else {
                                value = new TLByteArrayValue();
                        }
                }
                        break;
                case LIST_VAR:
                {
                        // a sized list is pre-filled with NULL placeholders
                        if (node.length>0) {
                                value = new TLListValue(node.length);
                                ((TLListValue)value).fill(TLNullValue.getInstance(), node.length);
                        }else {
                                value = new TLListValue();
                        }
                }
                        break;
                case MAP_VAR:
                {
                        if (node.length>0){
                                value = new TLMapValue(node.length);
                        }else {
                                value = new TLMapValue();
                        }
                }
                        break;
                case RECORD_VAR:
                        // record metadata comes either from an input port or from the graph by ID
                        DataRecordMetadata metadata = null;
                        if (node.recordNo >= 0) {
                                metadata = parser.getInRecordMeta(node.recordNo);
                        } else {
                                try {
                                        metadata = graph.getDataRecordMetadata(node.metadataId, true);
                                } catch (Exception ex) {
                                        throw new TransformLangExecutorRuntimeException(node, "error in Record declaration", ex);
                                }
                        }
                        if (metadata == null) {
                                throw new TransformLangExecutorRuntimeException(node, "record variable declaration - " + "can't find metadata ID \"" + (node.metadataId != null ? node.metadataId : "<unknown ID>") + "\"");
                        }
                        value = new TLRecordValue(metadata);
                        break;
                default:
                        throw new TransformLangExecutorRuntimeException(node,
                                        "variable declaration - "
                                                        + "unknown type for variable \""
                                                        + node.name + "\"");

                }

                TLVariable variable=new TLVariable(node.name,value);
                stack.storeVar(node.localVar, node.varSlot,variable );

                if (node.hasInitValue) {
                        // can have spec node & initialization
                        node.jjtGetChild(node.jjtGetNumChildren()>1 ? 1 : 0).jjtAccept(this, data);
                        TLValue initValue = stack.pop();
                        TLValueType type =variable.getType();
                        if (type.isCompatible(initValue.type)) {
                                variable.setTLValue(initValue);
                        }else {
                                throw new TransformLangExecutorRuntimeException(node,
                                                "invalid assignment of \"" + initValue
                                                                + "\" ("+initValue.type +")to variable \"" + node.name
                                                                + "\" ("+type +")- incompatible data types");
                        }
                }
                return data;
        }
public Object visit(CLVFVariableLiteral node, Object data) {
TLVariable var = stack.getVar(node.localVar, node.varSlot);
TLValue index = null;
if (node.indexSet) {
try {
switch (var.getType()) {
case LIST:
node.jjtGetChild(0).jjtAccept(this, data);
index = stack.pop();
stack.push(((TLContainerValue)var.getTLValue()).getStoredValue(((TLNumericValue)index).getInt()));
break;
case MAP:
node.jjtGetChild(0).jjtAccept(this, data);
index = stack.pop();
stack.push(((TLContainerValue)var.getTLValue()).getStoredValue(index));
break;
case RECORD:
if (node.fieldID != null) {
if (node.arrayIndex == -1) {
node.arrayIndex = ((DataRecord) var
.getTLValue().getValue()).getMetadata()
.getFieldPosition(node.fieldID);
if (node.arrayIndex==-1) {
throw new TransformLangExecutorRuntimeException(
node, "invalid field ID \""
+ node.fieldID
+ "\" of variable \""
+ var.getName() + "\" - type "
+ var.getType().toString());
}
}
stack.push(((TLContainerValue) var.getTLValue())
.getStoredValue(node.arrayIndex));
} else {
node.jjtGetChild(0).jjtAccept(this, data);
index = stack.pop();
stack.push(((TLContainerValue) var.getTLValue())
.getStoredValue(index));
}
break;
case BYTE:
node.jjtGetChild(0).jjtAccept(this, data);
index = stack.pop();
stack.push(((TLContainerValue)var.getTLValue()).getStoredValue(((TLNumericValue)index).getInt()));
break;
default:
throw new TransformLangExecutorRuntimeException(node,"invalid usage if index for variable \""
+ var.getName() + "\" - type "
+ var.getType().toString());
}
}catch (TransformLangExecutorRuntimeException ex1){
throw ex1;
}catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid index \"" + index + "\" of variable \""
+ var.getName() + "\" - type "
+ var.getType().toString(), ex);
}
}else {
stack.push(var.getTLValue());
}
return var;
}
        /**
         * Executes an assignment statement: evaluates the right-hand side and
         * stores it into the variable referenced by child 0.
         *
         * Container variables (LIST/RECORD/MAP/BYTE) distinguish two contexts:
         *  - scalar context: the value is stored at an index/key/field position
         *  - container context: the whole container is assigned/overwritten
         * Function parameters are compiled as OBJECT_VAR, so their real
         * container type is re-derived at runtime before dispatching. Scalar
         * variables take a plain compatibility-checked assignment.
         */
        public Object visit(CLVFAssignment node, Object data) {
                CLVFVariableLiteral varNode = (CLVFVariableLiteral) node.jjtGetChild(0);

                TLVariable variableToAssign = stack.getVar(varNode.localVar,
                                varNode.varSlot);
                node.jjtGetChild(1).jjtAccept(this, data);
                TLValue valueToAssign = stack.pop();
                if (valueToAssign==null) {
                        throw new TransformLangExecutorRuntimeException(node,
                                        "invalid assignment of null value to variable \"" + varNode.varName+"\"");
                }

                int actualType = varNode.varType;

                /*
                 * Function parameters are of type OBJECT. This is determined in compile time
                 * However if the function parameter is passing a value of data record,
                 * map or list, it will never by assigned correctly.
                 * Therefore we have to determine the type dynamically in runtime.
                 */
                if (actualType == OBJECT_VAR) {
                        TLValueType paramType = variableToAssign.getType(); // retrieve actual type
                        switch (paramType) {
                        case RECORD:
                                actualType = RECORD_VAR;
                                break;
                        case MAP:
                                actualType = MAP_VAR;
                                break;
                        case LIST:
                                actualType = LIST_VAR;
                                break;
                        }
                }

                switch (actualType) {
                case LIST_VAR:
                        TLNumericValue index2List = null;
                        if (varNode.scalarContext) {
                                try {
                                        if (varNode.indexSet) {
                                                // list[idx] = value
                                                varNode.jjtGetChild(0).jjtAccept(this, data);
                                                index2List = (TLNumericValue)stack.pop();
                                                variableToAssign
                                                                .setTLValue(index2List.getInt(), valueToAssign);
                                        } else {
                                                // no index: append (-1 signals append semantics)
                                                variableToAssign.setTLValue(-1, valueToAssign);
                                        }
                                } catch (IndexOutOfBoundsException ex) {
                                        throw new TransformLangExecutorRuntimeException(
                                                        node,
                                                        "index \""
                                                                        + index2List
                                                                        + "\" is outside current limits of list/array: \""
                                                                        + varNode.varName + "\"", ex);
                                } catch (Exception ex) {
                                        throw new TransformLangExecutorRuntimeException(node,
                                                        "invalid assignment of \"" + valueToAssign
                                                                        + "\" to variable \"" + varNode.varName
                                                                        + "\"", ex);
                                }
                        } else {
                                // list context
                                if (valueToAssign.type.isArray() ) {
                                        variableToAssign.setTLValue(valueToAssign);
                                } else {
                                        throw new TransformLangExecutorRuntimeException(node,
                                                        "invalid assignment of value \""+valueToAssign+"\" to list/array \""
                                                                        + varNode.varName + "\"");
                                }
                        }
                        break;
                case RECORD_VAR:
                        TLValue fieldIndex = null;
                        if (varNode.scalarContext) {
                                try {
                                        if (varNode.fieldID != null) {
                                                // record.field = value; resolve the field name once and cache it
                                                if (varNode.arrayIndex == -1) {
                                                        varNode.arrayIndex = ((DataRecord) variableToAssign
                                                                        .getTLValue().getValue()).getMetadata()
                                                                        .getFieldPosition(varNode.fieldID);

                                                        // check if the referenced field exists and we were able to resolve it
                                                        if (varNode.arrayIndex == -1) {
                                                                final String fieldName = varNode.fieldID == null ? "null" : varNode.fieldID;
                                                                throw new TransformLangExecutorRuntimeException(node,
                                                                                "referenced field \"" + fieldName + "\" does not exist");
                                                        }
                                                }
                                                ((TLContainerValue) variableToAssign.getTLValue())
                                                                .setStoredValue(varNode.arrayIndex,
                                                                                valueToAssign);
                                        } else {
                                                // record[indexExpr] = value
                                                varNode.jjtGetChild(0).jjtAccept(this, data);
                                                fieldIndex = stack.pop();
                                                ((TLContainerValue) variableToAssign.getTLValue())
                                                                .setStoredValue(fieldIndex, valueToAssign);
                                        }
                                } catch (Exception ex) {
                                        throw new TransformLangExecutorRuntimeException(node,
                                                        "invalid assignment of \"" + valueToAssign
                                                                        + "\" to variable \"" + varNode.varName
                                                                        + "\"", ex);
                                }
                        } else {
                                // whole-record assignment
                                try {
                                        variableToAssign.getTLValue().setValue(valueToAssign);
                                } catch (Exception ex) {
                                        throw new TransformLangExecutorRuntimeException(node,
                                                        "invalid assignment of \"" + valueToAssign
                                                                        + "\" to variable \"" + varNode.varName
                                                                        + "\"", ex);
                                }
                        }
                        break;
                case MAP_VAR:
                        TLValue indexMap=null;
                        if (varNode.scalarContext) {
                                if (varNode.indexSet) {
                                        try {
                                                // map[key] = value
                                                varNode.jjtGetChild(0).jjtAccept(this, data);
                                                indexMap = stack.pop();
                                                ((TLContainerValue)variableToAssign.getTLValue()).setStoredValue(indexMap,valueToAssign);
                                        } catch (Exception ex) {
                                                throw new TransformLangExecutorRuntimeException(node,
                                                                "invalid assignment of \"" + valueToAssign
                                                                                + "\" to variable \"" + varNode.varName
                                                                                + "\"", ex);
                                        }
                                } else {
                                        // keyless assignment only allowed for container values
                                        if (valueToAssign instanceof TLContainerValue)
                                                ((TLContainerValue)variableToAssign.getTLValue()).setValue(valueToAssign);
                                        // no key specified,
                                        else throw new TransformLangExecutorRuntimeException(node,
                                                        "no key defined when assigning to Map variable \"" + varNode.varName);
                                }
                        } else {
                                try {
                                        variableToAssign.getTLValue().setValue(valueToAssign);
                                } catch (Exception ex) {
                                        throw new TransformLangExecutorRuntimeException(node,
                                                        "invalid assignment of \"" + valueToAssign
                                                                        + "\" to variable \"" + varNode.varName
                                                                        + "\"", ex);
                                }
                        }
                        break;
                case BYTE_VAR:
                        TLNumericValue indexByteArray = null;
                        if (varNode.scalarContext) {
                                try {
                                        // scalar context
                                        if (varNode.indexSet) {
                                                varNode.jjtGetChild(0).jjtAccept(this, data);
                                                indexByteArray = (TLNumericValue)stack.pop();
                                                ((TLContainerValue)variableToAssign.getTLValue())
                                                                .setStoredValue(indexByteArray.getInt(), valueToAssign);
                                        } else {
                                                // no index: append (-1 signals append semantics)
                                                ((TLContainerValue)variableToAssign.getTLValue()).setStoredValue(-1, valueToAssign);
                                        }
                                } catch (IndexOutOfBoundsException ex) {
                                        throw new TransformLangExecutorRuntimeException(
                                                        node,
                                                        "index \""
                                                                        + indexByteArray
                                                                        + "\" is outside current limits byte array \""
                                                                        + varNode.varName + "\"", ex);
                                } catch (Exception ex) {
                                        throw new TransformLangExecutorRuntimeException(node,
                                                        "invalid assignment of \"" + valueToAssign
                                                                        + "\" to variable \"" + varNode.varName
                                                                        + "\"", ex);
                                }
                        } else {
                                // list context
                                if (valueToAssign.type.isArray() || valueToAssign==TLNullValue.getInstance()) {
                                        variableToAssign.setTLValue(valueToAssign);
                                } else {
                                        throw new TransformLangExecutorRuntimeException(node,
                                                        "invalid assignment of scalar value to byte array \""
                                                                        + varNode.varName + "\"");
                                }
                        }
                        break;
                default:
                        // scalar variables: plain compatibility-checked assignment
                        TLValueType type=variableToAssign.getType();
                        if (type.isCompatible(valueToAssign.type)) {
                                try{
                                        variableToAssign.setTLValue(valueToAssign);
                                }catch(Exception ex){
                                        throw new TransformLangExecutorRuntimeException(node,"invalid assignment of \"" + valueToAssign.toString()
                                                        + "\" [" + valueToAssign.type
                                                        + "] to variable \""
                                                        + variableToAssign.getName() + "\" ["
                                                        + variableToAssign.getType()
                                                        + "] \" - "+ex.getMessage(),ex);
                                }
                        } else {
                                throw new TransformLangExecutorRuntimeException(node,
                                                "invalid assignment of \"" + valueToAssign.toString()
                                                                + "\" [" + valueToAssign.type
                                                                + "] to variable \""
                                                                + variableToAssign.getName() + "\" ["
                                                                + variableToAssign.getType()
                                                                + "] \" - incompatible data types");
                        }
                }
                return data;
        }
/**
 * Executes a single direct mapping of a source onto an output record field
 * ($out.field := source). The strategy is selected by node.mappingType:
 * MultipleLiteral2Field tries several candidate expressions in order until
 * one assigns successfully; Field2Field copies an input field directly;
 * Literal2Field evaluates one expression and assigns it.
 * All failures are rethrown as TransformLangExecutorRuntimeException with
 * the originating exception attached as the cause (previously several
 * branches dropped the cause, unlike the Literal2Field branch).
 *
 * @param node mapping node (carries recordNo/fieldNo/fieldName and children)
 * @param data visitor pass-through value, returned unchanged
 * @return data
 */
public Object visit(CLVFDirectMapping node, Object data) {
    DataField field = outputRecords[node.recordNo].getField(node.fieldNo);
    TLValue value = null;
    switch (node.mappingType) {
    case MultipleLiteral2Field:
        final int arity = node.arity;
        try {
            // we try till success or no more options
            for (int i = 0; i < arity; i++) {
                node.jjtGetChild(i).jjtAccept(this, data);
                value = stack.pop();
                try {
                    value.copyToDataField(field);
                    break; // success during assignment, finish looping
                } catch (Exception ex) {
                    if (i == arity - 1)
                        throw ex; // no more candidates - report the last failure
                }
            }
        } catch (BadDataFormatException ex) {
            if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable()) {
                // FIX: attach the cause instead of silently dropping it
                throw new TransformLangExecutorRuntimeException(node, "can't assign NULL to \"" + node.fieldName + "\"", ex);
            }
            throw new TransformLangExecutorRuntimeException(node, "bad data when mapping field \"" + node.fieldName + "\" (" + field.getMetadata().getName() + ":" + field.getMetadata().getTypeAsString() + ") - assigning \"" + value + "\" (" + value.type + ")", ex);
        } catch (TransformLangExecutorRuntimeException ex) {
            throw ex; // already carries node context - propagate untouched
        } catch (Exception ex) {
            String msg = ex.getMessage();
            throw new TransformLangExecutorRuntimeException(node, (msg != null ? msg : "") + " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType()) + ") - assigning \"" + value + "\" (" + (value != null ? value.getType().getName() : "unknown type") + ")", ex);
        }
        break;
    case Field2Field:
        try {
            CLVFInputFieldLiteral childNode = ((CLVFInputFieldLiteral) node.jjtGetChild(0));
            childNode.bindToField(inputRecords);
            node.srcField = childNode.field;
            field.setValue(node.srcField);
        } catch (BadDataFormatException ex) {
            if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable() && node.srcField.isNull()) {
                throw new TransformLangExecutorRuntimeException(node, "can't assign NULL to \"" + node.fieldName + "\"", ex);
            } else {
                throw new TransformLangExecutorRuntimeException(node, "bad data when mapping field \"" + node.fieldName + "\" (" + field.getMetadata().getName() + ":" + field.getMetadata().getTypeAsString() + ") - assigning \"" + node.srcField.toString() +
                        "\" (" + node.srcField.getMetadata().getName() + ":" + node.srcField.getMetadata().getTypeAsString() + " )", ex);
            }
        } catch (Exception ex) {
            String msg = ex.getMessage();
            throw new TransformLangExecutorRuntimeException(node, (msg != null ? msg : "") + " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType()) + ") - assigning \"" + value + "\" (" + (value != null ? value.getType().getName() : "unknown type") + ")", ex);
        }
        break;
    case Literal2Field:
        try {
            node.jjtGetChild(0).jjtAccept(this, data);
            value = stack.pop();
            value.copyToDataField(field);
        } catch (BadDataFormatException ex) {
            if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable()) {
                throw new TransformLangExecutorRuntimeException(node, "can't assign NULL to \"" + node.fieldName + "\"", ex);
            }
            throw new TransformLangExecutorRuntimeException(node, "bad data when mapping field \"" + node.fieldName + "\" (" + field.getMetadata().getName() + ":" + field.getMetadata().getTypeAsString() + ") - assigning \"" + value + "\" (" + value.type + ")", ex);
        } catch (TransformLangExecutorRuntimeException ex) {
            throw ex;
        } catch (Exception ex) {
            String msg = ex.getMessage();
            throw new TransformLangExecutorRuntimeException(node, (msg != null ? msg : "") + " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType()) + ") - assigning \"" + value + "\" (" + (value != null ? value.getType().getName() : "unknown type") + ")", ex);
        }
        break;
    default:
        // this should not happen
        throw new TransformLangExecutorRuntimeException(node, "unrecognized mapping type (internal error)");
    }
    return data;
}
/**
 * Executes a wildcard ($out.* := $in.*) mapping by delegating to the custom
 * transformation attached to the node. The transformation is lazily
 * initialized on first execution with the parser's input/output metadata.
 *
 * @param node wildcard mapping node holding the custom transform
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFWildCardMapping node, Object data) {
    if (!node.initialized) {
        try {
            node.custTrans.setLogger(logger);
            // metadata comes from the surrounding parser, not the node itself
            node.custTrans.init(null, parser.getInRecordMetadata(), parser
                    .getOutRecordMetadata());
        } catch (ComponentNotReadyException ex) {
            throw new TransformLangExecutorRuntimeException(node, ex.getMessage(), ex);
        }
        node.initialized = true;
    }
    try {
        node.custTrans.transform(inputRecords, outputRecords);
    } catch (Exception ex) {
        throw new TransformLangExecutorRuntimeException(node, ex.getMessage(), ex);
    }
    return data;
}
/*
 * Declaration & calling of Functions here
 */

/**
 * Executes a function call. External (library) functions are invoked
 * directly with arguments popped from the stack; internal (CTL-declared)
 * functions are executed by interpreting the body of their declaration
 * node in a freshly pushed call frame. In both cases a result (if any)
 * is left on top of the stack.
 *
 * @param node call node; node.externalFunction is non-null for library calls
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFFunctionCallStatement node, Object data) {
    // EXTERNAL FUNCTION
    if (node.externalFunction != null) {
        // put call parameters on stack
        node.childrenAccept(this, data);
        // convert stack content into values
        try {
            node.context.setGraph(this.graph);
            TLValue returnVal = node.externalFunction.execute(stack.pop(
                    node.externalFunctionParams, node.jjtGetNumChildren()),
                    node.context);
            stack.push(returnVal);
        } catch (TransformLangExecutorRuntimeException ex) {
            // attach node context before propagating
            ex.setNode(node);
            throw ex;
        } catch (Exception ex) {
            String msg = "Java exception [" + ex.getClass().getName() + "] occured during call of external function: " + node.externalFunction.getLibrary() + "." + node.externalFunction.getName();
            logger.debug(msg, ex);
            throw new TransformLangExecutorRuntimeException(node, msg, ex);
        }
    } else {
        // INTERNAL FUNCTION
        // put call parameters on stack
        node.childrenAccept(this, data);
        CLVFFunctionDeclaration executionNode = node.callNode;
        // open call frame
        stack.pushFuncCallFrame();
        // store call parameters from stack as local variables
        // (popped in reverse so parameter 0 ends up in local slot 0)
        for (int i = executionNode.numParams - 1; i >= 0; stack
                .storeLocalVar(i--, new TLVariable("local", stack.pop())))
            ;

        // execute function body
        // loop execution
        TLValue returnData;
        final int numChildren = executionNode.jjtGetNumChildren();
        for (int i = 0; i < numChildren; i++) {
            executionNode.jjtGetChild(i).jjtAccept(this, data);
            returnData = stack.pop(); // in case there is anything on top
            // of stack
            // check for break or continue statements
            if (breakFlag) {
                breakFlag = false;
                if (breakType == BREAK_RETURN) {
                    // re-publish the return value for the caller's frame
                    if (returnData != null)
                        stack.push(returnData);
                    break;
                }
            }
        }
        // discard the call frame together with its local variables
        stack.popFuncCallFrame();
    }
    return data;
}
/**
 * Function declarations are not executed when the walker reaches them;
 * their bodies run only when invoked via a call statement.
 */
public Object visit(CLVFFunctionDeclaration node, Object data) {
    return data;
}
/**
 * Delegates execution to the single wrapped expression node.
 */
public Object visit(CLVFStatementExpression node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    return data;
}
/**
 * Directly executes an interpreted (CTL-declared) function outside the
 * normal AST walk, with argument values supplied by the caller. Any return
 * value produced by a return statement is left on top of the stack; the
 * method itself returns the (possibly substituted) argument array.
 *
 * NOTE(review): assumes data.length >= executionNode.numParams - a shorter
 * array would raise ArrayIndexOutOfBoundsException; verify at call sites.
 *
 * @param executionNode declaration node of the function to run
 * @param data argument values (null is treated as an empty argument list)
 * @return the argument array, not the function result
 */
public Object executeFunction(CLVFFunctionDeclaration executionNode, TLValue[] data) {
    //put call parameters on stack
    if (data == null) {
        data = new TLValue[0];
    }
    //TODO - check for function call parameter types

    // open call frame
    stack.pushFuncCallFrame();
    // store call parameters from stack as local variables
    for (int i = executionNode.numParams - 1; i >= 0; i--) {
        stack.storeLocalVar(i, new TLVariable(executionNode.varNames[i], data[i]));
    }

    // execute function body
    // loop execution
    TLValue returnData;
    final int numChildren = executionNode.jjtGetNumChildren();
    for (int i = 0; i < numChildren; i++) {
        executionNode.jjtGetChild(i).jjtAccept(this, data);
        returnData = stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag) {
            breakFlag = false;
            if (breakType == BREAK_RETURN) {
                // re-publish the function result for the caller
                if (returnData != null)
                    stack.push(returnData);
                break;
            }
        }
    }
    // discard the call frame together with its local variables
    stack.popFuncCallFrame();
    return data;
}
/**
 * Implements raise_error(): evaluates the message expression and aborts
 * execution by throwing a runtime exception carrying that message.
 */
public Object visit(CLVFRaiseErrorNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    final TLValue userMessage = stack.pop();
    throw new TransformLangExecutorRuntimeException(node, null,
            "!!! Exception raised by user: " + ((userMessage != null) ? userMessage.toString() : "no message"));
}
/**
 * Implements "eval": takes the string produced by the child expression,
 * re-parses it at runtime (as an expression when node.expMode is set,
 * otherwise as a full program) and immediately executes the parse tree.
 *
 * @param node eval node; child 0 yields the source text
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFEvalNode node, Object data) {
    // get TL expression
    node.jjtGetChild(0).jjtAccept(this, data);
    String src = stack.pop().toString();
    Node parseTree;
    // construct parser
    try {
        ((TransformLangParser) parser).ReInit(new CharSequenceReader(src));
        if (node.expMode)
            parseTree = ((TransformLangParser) parser).StartExpression();
        else
            parseTree = ((TransformLangParser) parser).Start();
    } catch (ParseException ex) {
        throw new TransformLangExecutorRuntimeException(node,
                "Can't parse \"eval\" expression:" + ex.getMessage());
    } catch (NullPointerException ex) {
        // parser was never injected via setParser()
        throw new RuntimeException("Error in \"eval\" execution/parsing (parser is missing).", ex);
    }

    /*
     * option to permanently store parsed expression in this tree
     * if (true){
     *     // add this subtree to enclosing AST
     * }
     */

    // execute eval
    if (node.expMode)
        visit((CLVFStartExpression) parseTree, data);
    else
        visit((CLVFStart) parseTree, data);

    return data;
}
/**
 * Executes a sequence operation (next/current/reset on a named sequence).
 * On first execution the Sequence is resolved from the graph, initialized
 * if needed, and cached on the node - as is the TLValue result holder,
 * which is reused across calls. The result is pushed on the stack.
 *
 * @param node sequence node (carries sequenceName, opType and retType)
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFSequenceNode node, Object data) {
    if (node.sequence == null) {
        // first execution: resolve & cache the sequence from the graph
        if (graph != null) {
            node.sequence = graph.getSequence(node.sequenceName);
        } else {
            throw new TransformLangExecutorRuntimeException(node,
                    "Can't obtain Sequence \"" + node.sequenceName +
                    "\" from graph - graph is not assigned");
        }
        if (node.sequence == null) {
            throw new TransformLangExecutorRuntimeException(node,
                    "Can't obtain Sequence \"" + node.sequenceName +
                    "\" from graph \"" + graph.getName() + "\"");
        }
        // initialize the sequence if necessary
        if (!node.sequence.isInitialized()) {
            try {
                node.sequence.init();
            } catch (ComponentNotReadyException e) {
                throw new TransformLangExecutorRuntimeException(
                        node, "Unable to initialize sequence "
                        + "\"" + node.sequenceName + "\"", e);
            }
        }
    }
    if (node.value == null) {
        // allocate the reusable result holder matching the requested type
        switch (node.retType) {
        case LONG_VAR:
            node.value = TLValue.create(TLValueType.LONG);
            break;
        case STRING_VAR:
            node.value = TLValue.create(TLValueType.STRING);
            break;
        default:
            node.value = TLValue.create(TLValueType.INTEGER);
        }
    }
    TLValue retVal = node.value;

    switch (node.opType) {
    case CLVFSequenceNode.OP_RESET:
//        try{
        node.sequence.resetValue();
//        }catch(ComponentNotReadyException ex){
//            throw new TransformLangExecutorRuntimeException(node,"Error when resetting sequence \""+node.sequenceName+"\"",ex);
//        }
        retVal = TLNumericValue.ZERO; // reset() evaluates to 0
        break;
    case CLVFSequenceNode.OP_CURRENT:
        // read the current value without advancing the sequence
        switch (node.retType) {
        case LONG_VAR:
            ((TLNumericValue) retVal).setValue(node.sequence.currentValueLong());
            break;
        case STRING_VAR:
            retVal.setValue(node.sequence.currentValueString());
            break;
        default:
            ((TLNumericValue) retVal).setValue(node.sequence.currentValueInt());
        }
        break;
    default: // default is next value from sequence
        switch (node.retType) {
        case LONG_VAR:
            ((TLNumericValue) retVal).setValue(node.sequence.nextValueLong());
            break;
        case STRING_VAR:
            retVal.setValue(node.sequence.nextValueString());
            break;
        default:
            ((TLNumericValue) retVal).setValue(node.sequence.nextValueInt());
        }
    }
    stack.push(retVal);
    return data;
}
/**
 * Executes a lookup-table operation. On first execution the lookup table is
 * resolved from the graph (and initialized if necessary) and the opened
 * Lookup instance is cached in the executor-wide 'lookups' map keyed by
 * table id. OP_GET performs a keyed search, OP_NEXT iterates the previous
 * search, OP_NUM_FOUND reports the hit count; for GET/NEXT the looked-up
 * field value (or a TL null) is pushed on the stack.
 *
 * @param node lookup node (carries lookupName, fieldName and opType)
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFLookupNode node, Object data) {
    DataRecord record = null;
    if (node.lookupTable == null) {
        node.lookupTable = graph.getLookupTable(node.lookupName);
        if (node.lookupTable == null) {
            throw new TransformLangExecutorRuntimeException(node,
                    "Can't obtain LookupTable \"" + node.lookupName
                    + "\" from graph \"" + graph.getName() + "\"");
        }
        else {
            // we have to initialize the lookup table ourselves, graph is not doing it for us
            try {
                if (!node.lookupTable.isInitialized()) {
                    node.lookupTable.init();
                }
            } catch (ComponentNotReadyException e) {
                throw new TransformLangExecutorRuntimeException(node,
                        "Error when initializing lookup table \""
                        + node.lookupName + "\" :", e);
            }
        }
        if (node.lookup == null && lookups.containsKey(node.lookupTable.getId())) {
            // reuse a lookup opened earlier by another node on the same table
            node.lookup = lookups.get(node.lookupTable.getId());
        }
        if (node.opType == CLVFLookupNode.OP_GET || node.opType == CLVFLookupNode.OP_NEXT) {
            // resolve the target field index once, up front
            DataRecordMetadata metadata = node.lookupTable.getMetadata();
            if (metadata != null) {
                node.fieldNum = metadata.getFieldPosition(
                        node.fieldName);
                if (node.fieldNum < 0) {
                    throw new TransformLangExecutorRuntimeException(node,
                            "Invalid field name \"" + node.fieldName
                            + "\" at LookupTable \"" + node.lookupName
                            + "\" in graph \"" + graph.getName() + "\"");
                }
            }
        }
    }
    switch (node.opType) {
    case CLVFLookupNode.OP_INIT:
        // The code is removed from CTL1 after discussion with Kokon, in CTL2 these functions do not exist anymore as they are not needed
        /*try {
            node.lookupTable.init();
            node.lookupTable.preExecute();
        } catch (ComponentNotReadyException ex) {
            throw new TransformLangExecutorRuntimeException(node,
                    "Error when initializing lookup table \""
                    + node.lookupName + "\" :", ex);
        }*/
        return data;
    case CLVFLookupNode.OP_FREE:
        // The code is removed from CTL1 after discussion with Kokon, in CTL2 these functions do not exist anymore as they are not needed
        /*node.lookupTable.free();
        node.lookup = null;
        lookups.remove(node.lookupTable.getId());*/
        return data;
    case CLVFLookupNode.OP_NUM_FOUND:
        stack.push(new TLNumericValue(TLValueType.INTEGER, new CloverInteger(
                node.lookup.getNumFound())));
        return data;
    case CLVFLookupNode.OP_GET:
        // children compute the lookup key value(s) onto the stack
        node.childrenAccept(this, data);
        if (node.lookup == null) {
            try {
                node.createLookup(stack);
            } catch (ComponentNotReadyException ex) {
                throw new TransformLangExecutorRuntimeException(node,
                        "Error when initializing lookup table \""
                        + node.lookupName + "\" :", ex);
            }
            lookups.put(node.lookupTable.getId(), node.lookup);
        }
        node.seek(stack);
        if (node.fieldNum == -1) {
            // field index was not resolvable earlier (metadata was null)
            node.fieldNum = node.lookupTable.getMetadata().getFieldPosition(node.fieldName);
        }
        if (node.lookup.hasNext()) {
            record = node.lookup.next();
        } else {
            record = null;
        }
        break;
    default: // CLVFLookupNode.OP_NEXT:
        if (node.lookup.hasNext()) {
            record = node.lookup.next();
        } else {
            record = null;
        }
    }
    if (record != null) {
        stack.push(TLValue.convertValue(record.getField(node.fieldNum)));
    } else {
        stack.push(TLNullValue.getInstance());
    }
    return data;
}
/**
 * Executes a dictionary access (read/write/delete) on the graph dictionary.
 * Keys must be TL strings; written values must be TL strings or TL null
 * (null deletes nothing - it stores a null value). OP_READ pushes the
 * stored value (or TL null) on the stack.
 *
 * @param node dictionary node carrying the operation code and key/value children
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFDictionaryNode node, Object data) {
    final Dictionary d = graph.getDictionary();
    if (d == null) {
        throw new TransformLangExecutorRuntimeException("No dictionary defined on the graph");
    }
    TLValue key = null;
    TLValue value = null;
    switch (node.operation) {
    case CLVFDictionaryNode.OP_READ:
        // evaluate the key
        node.jjtGetChild(0).jjtAccept(this, data);
        key = stack.pop();
        if (key.getType() != TLValueType.STRING) {
            throw new TransformLangExecutorRuntimeException("Dictionary supports only non-null string keys");
        }
        final Object dictValue = d.getValue(((StringBuilder) key.getValue()).toString());
        stack.push(dictValue == null ? TLNullValue.getInstance() : new TLStringValue(dictValue.toString()));
        break;
    case CLVFDictionaryNode.OP_WRITE:
        // evaluate the key
        node.jjtGetChild(0).jjtAccept(this, data);
        key = stack.pop();
        if (key.getType() != TLValueType.STRING) {
            throw new TransformLangExecutorRuntimeException("Dictionary supports only string keys");
        }
        final String keyToWrite = ((StringBuilder) key.getValue()).toString();
        // evaluate the value
        node.jjtGetChild(1).jjtAccept(this, data);
        value = stack.pop();
        String valueToWrite = null;
        if (value == TLNullValue.getInstance()) {
            // writing null value
            valueToWrite = null;
        } else if (value.getType() == TLValueType.STRING) {
            // convert string value to string
            valueToWrite = ((StringBuilder) value.getValue()).toString();
        } else {
            // anything non-null, non-string is error
            throw new TransformLangExecutorRuntimeException("Dictionary supports only string values");
        }
        try {
            d.setValue(keyToWrite, StringDictionaryType.TYPE_ID, valueToWrite);
        } catch (ComponentNotReadyException e) {
            throw new TransformLangExecutorRuntimeException("Cannot set dictionary key '" + keyToWrite + "' to value '" + valueToWrite + "'", e);
        }
        break;
    case CLVFDictionaryNode.OP_DELETE:
        // evaluate the key
        node.jjtGetChild(0).jjtAccept(this, data);
        key = stack.pop();
        if (key.getType() != TLValueType.STRING) {
            throw new TransformLangExecutorRuntimeException("Dictionary supports only non-null string keys");
        }
        final String keyToDelete = ((StringBuilder) key.getValue()).toString();
        try {
            d.setValue(keyToDelete, null);
        } catch (ComponentNotReadyException e) {
            // FIX: propagate the cause (it was dropped before, unlike in OP_WRITE)
            throw new TransformLangExecutorRuntimeException("Cannot delete key '" + keyToDelete + "'", e);
        }
        break;
    default:
        throw new TransformLangExecutorRuntimeException("Unknown dictionary operation: " + node.operation);
    }
    return data;
}
/**
 * Implements print_log(): evaluates the message expression and writes it
 * to the runtime logger at the level encoded in the node
 * (1=debug, 2=info, 3=warn, 4=error, 5=fatal, anything else=trace).
 */
public Object visit(CLVFPrintLogNode node, Object data) {
    if (runtimeLogger == null) {
        throw new TransformLangExecutorRuntimeException(node,
                "Can NOT perform logging operation - no logger defined");
    }
    node.jjtGetChild(0).jjtAccept(this, data);
    final TLValue message = stack.pop();
    switch (node.level) {
        case 1: // "debug"
            runtimeLogger.debug(message);
            break;
        case 2: // "info"
            runtimeLogger.info(message);
            break;
        case 3: // "warn"
            runtimeLogger.warn(message);
            break;
        case 4: // "error"
            runtimeLogger.error(message);
            break;
        case 5: // "fatal"
            runtimeLogger.fatal(message);
            break;
        default: // anything else is logged at trace level
            runtimeLogger.trace(message);
    }
    return data;
}
/**
 * Executes every statement contained in an imported source node.
 */
public Object visit(CLVFImportSource node, Object data) {
    node.childrenAccept(this, data);
    return data;
}
/**
 * Pushes the pre-resolved symbolic constant carried by the node.
 */
public Object visit(CLVFSymbolNameExp node, Object data) {
    stack.push(node.typeValue);
    return data;
}
/**
 * Operator nodes carry no behaviour of their own; the enclosing
 * expression node reads their 'kind' directly.
 */
public Object visit(CLVFOperator node, Object data) {
    return data;
}
/**
 * Executes a postfix ++/-- on a variable. The value left on the stack is a
 * duplicate of the variable's value BEFORE modification (postfix
 * semantics); numeric variables change by one, date variables by one day.
 *
 * @param node postfix node; child 0 is the variable, child 1 the operator
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFPostfixExpression node, Object data) {
    // get variable && put value on stack by executing child node
    Node child = node.jjtGetChild(0);
    if (!(child instanceof CLVFVariableLiteral)) {
        throw new TransformLangExecutorRuntimeException(node, "postfix expression is allowed only on variable");
    }
    // visiting the variable literal pushes its value AND returns the variable
    TLVariable var = (TLVariable) child.jjtAccept(this, data);
    int operatorType = ((CLVFOperator) node.jjtGetChild(1)).kind;

    // value instance on stack is variable's internal value
    // duplicate it before incrementing
    TLValue origValue = stack.pop();
    stack.push(origValue.duplicate());

    if (operatorType == INCR) {
        if (var.getType().isNumeric()) {
            ((TLNumericValue) var.getTLValue()).add(Stack.NUM_ONE_P);
        } else if (var.getType() == TLValueType.DATE) {
            // advance the date by one day using the stack's shared calendar
            stack.calendar.setTime(((TLDateValue) var.getTLValue()).getDate());
            stack.calendar.add(Calendar.DATE, 1);
            ((TLDateValue) var.getTLValue()).setValue(stack.calendar.getTime());
        } else {
            throw new TransformLangExecutorRuntimeException(node, "variable [" + var + "] is not of numeric or date type");
        }
    } else {
        if (var.getType().isNumeric()) {
            ((TLNumericValue) var.getTLValue()).sub(Stack.NUM_ONE_P);
        } else if (var.getType() == TLValueType.DATE) {
            // move the date back by one day using the stack's shared calendar
            stack.calendar.setTime(((TLDateValue) var.getTLValue()).getDate());
            stack.calendar.add(Calendar.DATE, -1);
            var.getTLValue().setValue(stack.calendar.getTime());
        } else {
            throw new TransformLangExecutorRuntimeException(node, "variable [" + var + "] is not of numeric or date type");
        }
    }
    return data;
}
/**
 * Executes a prefix unary expression: ++/-- (in-place on a variable),
 * logical NOT, unary minus (negation) and unary plus (absolute value).
 * The operator kind is read from child 0, the operand from child 1; the
 * result is left on the stack.
 *
 * @param node unary expression node
 * @param data visitor pass-through value, returned unchanged
 */
public Object visit(CLVFUnaryExpression node, Object data) {
    int operatorType = ((CLVFOperator) node.jjtGetChild(0)).kind;
    Node child = node.jjtGetChild(1);
    TLValue val;

    switch (operatorType) {
    case INCR:
    case DECR:
        // get variable && put value on stack by executing child node
        if (!(child instanceof CLVFVariableLiteral)) {
            // NOTE(review): message says "postfix" although this is the prefix handler
            throw new TransformLangExecutorRuntimeException(node,
                    "postfix expression is allowed only on variable");
        }
        // NOTE(review): this first jjtAccept also appears to push the
        // variable's OLD value, which is never popped here (the postfix
        // handler pops it) - verify stack balance.
        TLVariable var = (TLVariable) child.jjtAccept(this, data);
        if (var.getType().isNumeric()) {
            ((TLNumericValue) var.getTLValue()).add(
                    operatorType == INCR ? Stack.NUM_ONE_P
                            : Stack.NUM_MINUS_ONE_P);
        } else if (var.getType() == TLValueType.DATE) {
            // shift the date by +/- one day using the stack's shared calendar
            stack.calendar.setTime(((TLDateValue) var.getTLValue()).getDate());
            stack.calendar
                    .add(Calendar.DATE, operatorType == INCR ? 1 : -1);
            var.getTLValue().setValue(stack.calendar.getTime());
        } else {
            throw new TransformLangExecutorRuntimeException(node,
                    "variable [" + var + "] is not of numeric or date type");
        }
        // re-visit the child so the UPDATED value ends up on the stack
        // (prefix semantics)
        child.jjtAccept(this, data);
        break;
    case NOT:
        child.jjtAccept(this, data);
        val = stack.pop();
        if (val.type == TLValueType.BOOLEAN) {
            stack.push(val == TLBooleanValue.TRUE ? TLBooleanValue.FALSE : TLBooleanValue.TRUE);
        } else {
            throw new TransformLangExecutorRuntimeException(node,
                    new Object[] { val },
                    "logical condition does not evaluate to BOOLEAN value");
        }
        break;
    case MINUS:
        child.jjtAccept(this, data);
        val = stack.pop();
        if (val.type.isNumeric()) {
            // duplicate before negating so the operand value is not mutated
            val = val.duplicate();
            ((TLNumericValue) val).neg();
            stack.push(val);
        } else {
            throw new TransformLangExecutorRuntimeException(node,
                    new Object[] { val }, "variable is not of numeric type");
        }
        break;
    case PLUS:
        child.jjtAccept(this, data);
        val = stack.pop();
        if (val.type.isNumeric()) {
            // unary plus is implemented as absolute value here
            val = val.duplicate();
            ((TLNumericValue) val).abs();
            stack.push(val);
        } else {
            throw new TransformLangExecutorRuntimeException(node,
                    new Object[] { val }, "variable is not of numeric type");
        }
        break;
    default:
        throw new TransformLangExecutorRuntimeException(node,
                "unsupported operation");
    }
    return data;
}
/**
 * Publishes the node's pre-built literal list value onto the stack.
 */
public Object visit(CLVFListOfLiterals node, Object data) {
    stack.push(node.value);
    return data;
}
}
| cloveretl.engine/src/org/jetel/interpreter/TransformLangExecutor.java | /*
* jETeL/CloverETL - Java based ETL application framework.
* Copyright (c) Javlin, a.s. ([email protected])
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.jetel.interpreter;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Matcher;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jetel.data.DataField;
import org.jetel.data.DataRecord;
import org.jetel.data.NullRecord;
import org.jetel.data.lookup.Lookup;
import org.jetel.data.primitive.CloverInteger;
import org.jetel.data.primitive.DecimalFactory;
import org.jetel.exception.BadDataFormatException;
import org.jetel.exception.ComponentNotReadyException;
import org.jetel.graph.TransformationGraph;
import org.jetel.graph.dictionary.Dictionary;
import org.jetel.graph.dictionary.StringDictionaryType;
import org.jetel.interpreter.ASTnode.CLVFAddNode;
import org.jetel.interpreter.ASTnode.CLVFAnd;
import org.jetel.interpreter.ASTnode.CLVFAssignment;
import org.jetel.interpreter.ASTnode.CLVFBlock;
import org.jetel.interpreter.ASTnode.CLVFBreakStatement;
import org.jetel.interpreter.ASTnode.CLVFBreakpointNode;
import org.jetel.interpreter.ASTnode.CLVFCaseExpression;
import org.jetel.interpreter.ASTnode.CLVFComparison;
import org.jetel.interpreter.ASTnode.CLVFContinueStatement;
import org.jetel.interpreter.ASTnode.CLVFDictionaryNode;
import org.jetel.interpreter.ASTnode.CLVFDirectMapping;
import org.jetel.interpreter.ASTnode.CLVFDivNode;
import org.jetel.interpreter.ASTnode.CLVFDoStatement;
import org.jetel.interpreter.ASTnode.CLVFEvalNode;
import org.jetel.interpreter.ASTnode.CLVFForStatement;
import org.jetel.interpreter.ASTnode.CLVFForeachStatement;
import org.jetel.interpreter.ASTnode.CLVFFunctionCallStatement;
import org.jetel.interpreter.ASTnode.CLVFFunctionDeclaration;
import org.jetel.interpreter.ASTnode.CLVFIfStatement;
import org.jetel.interpreter.ASTnode.CLVFIffNode;
import org.jetel.interpreter.ASTnode.CLVFImportSource;
import org.jetel.interpreter.ASTnode.CLVFIncrDecrStatement;
import org.jetel.interpreter.ASTnode.CLVFInputFieldLiteral;
import org.jetel.interpreter.ASTnode.CLVFIsNullNode;
import org.jetel.interpreter.ASTnode.CLVFListOfLiterals;
import org.jetel.interpreter.ASTnode.CLVFLiteral;
import org.jetel.interpreter.ASTnode.CLVFLookupNode;
import org.jetel.interpreter.ASTnode.CLVFMinusNode;
import org.jetel.interpreter.ASTnode.CLVFModNode;
import org.jetel.interpreter.ASTnode.CLVFMulNode;
import org.jetel.interpreter.ASTnode.CLVFNVL2Node;
import org.jetel.interpreter.ASTnode.CLVFNVLNode;
import org.jetel.interpreter.ASTnode.CLVFOperator;
import org.jetel.interpreter.ASTnode.CLVFOr;
import org.jetel.interpreter.ASTnode.CLVFOutputFieldLiteral;
import org.jetel.interpreter.ASTnode.CLVFPostfixExpression;
import org.jetel.interpreter.ASTnode.CLVFPrintErrNode;
import org.jetel.interpreter.ASTnode.CLVFPrintLogNode;
import org.jetel.interpreter.ASTnode.CLVFPrintStackNode;
import org.jetel.interpreter.ASTnode.CLVFRaiseErrorNode;
import org.jetel.interpreter.ASTnode.CLVFRegexLiteral;
import org.jetel.interpreter.ASTnode.CLVFReturnStatement;
import org.jetel.interpreter.ASTnode.CLVFSequenceNode;
import org.jetel.interpreter.ASTnode.CLVFStart;
import org.jetel.interpreter.ASTnode.CLVFStartExpression;
import org.jetel.interpreter.ASTnode.CLVFStatementExpression;
import org.jetel.interpreter.ASTnode.CLVFSubNode;
import org.jetel.interpreter.ASTnode.CLVFSwitchStatement;
import org.jetel.interpreter.ASTnode.CLVFSymbolNameExp;
import org.jetel.interpreter.ASTnode.CLVFTryCatchStatement;
import org.jetel.interpreter.ASTnode.CLVFUnaryExpression;
import org.jetel.interpreter.ASTnode.CLVFVarDeclaration;
import org.jetel.interpreter.ASTnode.CLVFVariableLiteral;
import org.jetel.interpreter.ASTnode.CLVFWhileStatement;
import org.jetel.interpreter.ASTnode.CLVFWildCardMapping;
import org.jetel.interpreter.ASTnode.Node;
import org.jetel.interpreter.ASTnode.SimpleNode;
import org.jetel.interpreter.data.TLBooleanValue;
import org.jetel.interpreter.data.TLByteArrayValue;
import org.jetel.interpreter.data.TLContainerValue;
import org.jetel.interpreter.data.TLDateValue;
import org.jetel.interpreter.data.TLListValue;
import org.jetel.interpreter.data.TLMapValue;
import org.jetel.interpreter.data.TLNullValue;
import org.jetel.interpreter.data.TLNumericValue;
import org.jetel.interpreter.data.TLObjectValue;
import org.jetel.interpreter.data.TLRecordValue;
import org.jetel.interpreter.data.TLStringValue;
import org.jetel.interpreter.data.TLValue;
import org.jetel.interpreter.data.TLValueType;
import org.jetel.interpreter.data.TLVariable;
import org.jetel.metadata.DataFieldMetadata;
import org.jetel.metadata.DataRecordMetadata;
import org.jetel.util.string.CharSequenceReader;
import edu.umd.cs.findbugs.annotations.SuppressWarnings;
/**
 * Executor of FilterExpression parse tree.
 *
 * @author dpavlis
 * @since 16.9.2004
 */
public class TransformLangExecutor implements TransformLangParserVisitor,
TransformLangParserConstants{
// discriminators stored in breakType when breakFlag is raised by a
// break/continue/return statement
public static final int BREAK_BREAK=1;
public static final int BREAK_CONTINUE=2;
public static final int BREAK_RETURN=3;

// runtime value stack shared by all visit() methods
protected Stack stack;
// set when a break/continue/return was executed; breakType says which kind
protected boolean breakFlag;
protected int breakType;
// parameters that may be referenced from the interpreted code
protected Properties globalParameters;

// current input/output records the transformation operates on
protected DataRecord[] inputRecords;
protected DataRecord[] outputRecords;

protected Node emptyNode; // used as replacement for empty statements

protected TransformationGraph graph;
// logger exposed to the interpreted script (see visit(CLVFPrintLogNode))
protected Log runtimeLogger;
// parser used e.g. by "eval" to parse code at runtime
protected ExpParser parser;

static Log logger = LogFactory.getLog(TransformLangExecutor.class);

// opened lookups cached per lookup-table id (see visit(CLVFLookupNode))
Map<String, Lookup> lookups = new HashMap<String, Lookup>();
/**
 * Constructor
 *
 * @param globalParameters properties exposed to the interpreted code
 *        (may be null)
 */
public TransformLangExecutor(Properties globalParameters) {
    stack = new Stack();
    breakFlag = false;
    this.globalParameters = globalParameters;
    // placeholder used where the AST contains an empty statement
    emptyNode = new SimpleNode(Integer.MAX_VALUE);
}

/**
 * Creates an executor with no global parameters.
 */
public TransformLangExecutor() {
    this(null);
}
/** @return transformation graph this executor operates on (may be null) */
public TransformationGraph getGraph() {
    return graph;
}

/** Sets the graph used to resolve sequences, lookup tables and the dictionary. */
public void setGraph(TransformationGraph graph) {
    this.graph = graph;
}

/** @return logger used by the interpreted script's logging statements */
public Log getRuntimeLogger() {
    return runtimeLogger;
}

/** Sets the logger used by the interpreted script's logging statements. */
public void setRuntimeLogger(Log runtimeLogger) {
    this.runtimeLogger = runtimeLogger;
}
/**
 * Set input data records for processing.<br>
 * Referenced input data fields will be resolved from
 * these data records. Null entries in the array are replaced by
 * {@code NullRecord.NULL_RECORD} so later field access never hits a
 * null record.
 *
 * @param inputRecords array of input data records carrying values
 */
@SuppressWarnings(value="EI2")
public void setInputRecords(DataRecord[] inputRecords){
    this.inputRecords = inputRecords;
    // FIX: the previous enhanced-for loop assigned NullRecord.NULL_RECORD
    // to its local loop variable, which never updates the array element;
    // an indexed loop actually performs the substitution.
    for (int i = 0; i < this.inputRecords.length; i++) {
        if (this.inputRecords[i] == null) {
            this.inputRecords[i] = NullRecord.NULL_RECORD;
        }
    }
}
/**
 * Set output data records for processing.<br>
 * Referenced output data fields will be resolved from
 * these data records - assignment (in code) to output data field
 * will result in assignment to one of these data records.
 *
 * @param outputRecords array of output data records for setting values
 */
@SuppressWarnings(value="EI2")
public void setOutputRecords(DataRecord[] outputRecords){
    this.outputRecords=outputRecords;
}

/**
 * Set global parameters which may be referenced from within the
 * transformation source code.
 *
 * @param parameters properties visible to the interpreted code
 */
public void setGlobalParameters(Properties parameters){
    this.globalParameters=parameters;
}

/**
 * Allows to store parameter/value on stack from
 * where it can be read by executed script/function.
 *
 * @param obj Object/value to be stored (wrapped as a TL string value)
 * @since 10.12.2006
 */
public void setParameter(String obj){
    stack.push(new TLStringValue(obj));
}
/**
 * Method which returns result of executing parse tree.<br>
 * Basically, it returns whatever object was left on top of executor's
 * stack (usually as a result of last executed expression/operation).<br>
 * It can be called repetitively in order to read all objects from stack.
 *
 * @return Object saved on stack or NULL if no more objects are available
 */
public TLValue getResult() {
    return stack.pop();
}

/**
 * Return value of globally defined variable determined by slot number.
 * Slot can be obtained by calling <code>TransformLangParser.getGlobalVariableSlot(<i>varname</i>)</code>
 *
 * @param varSlot slot index of the global variable
 * @return Object - depending of Global variable type
 * @since 6.12.2006
 */
public TLVariable getGlobalVariable(int varSlot){
    return stack.getGlobalVar(varSlot);
}

/**
 * Allows to set value of defined global variable.
 *
 * @param varSlot slot index of the global variable
 * @param value new variable content
 * @since 6.12.2006
 */
public void setGlobalVariable(int varSlot,TLVariable value){
    stack.storeGlobalVar(varSlot,value);
}

/**
 * Allows to set parser which may be used in "eval".
 *
 * @param parser parser instance used for runtime re-parsing
 */
public void setParser(ExpParser parser){
    this.parser=parser;
}
/* *********************************************************** */
/*  implementation of visit methods for each class of AST node */
/* *********************************************************** */
/* it seems to be necessary to define a visit() method for SimpleNode */

/**
 * Default visit for generic SimpleNode instances (including the
 * emptyNode placeholder) - a deliberate no-op.
 */
public Object visit(SimpleNode node, Object data) {
    // throw new TransformLangExecutorRuntimeException(node,
    // "Error: Call to visit for SimpleNode");
    return data;
}
/**
 * Entry point for a parsed transformation: executes every top-level
 * child statement in order.
 */
public Object visit(CLVFStart node, Object data) {
    final int childCount = node.jjtGetNumChildren();
    for (int idx = 0; idx < childCount; idx++) {
        node.jjtGetChild(idx).jjtAccept(this, data);
    }
    return data; // return value is ignored by callers
}

/**
 * Entry point for a parsed stand-alone expression: evaluates every
 * child in order.
 */
public Object visit(CLVFStartExpression node, Object data) {
    final int childCount = node.jjtGetNumChildren();
    for (int idx = 0; idx < childCount; idx++) {
        node.jjtGetChild(idx).jjtAccept(this, data);
    }
    return data; // return value is ignored by callers
}
/**
 * Logical OR with short-circuit evaluation: the right operand is evaluated
 * only when the left one is FALSE. Both operands must evaluate to BOOLEAN
 * TL values; the boolean result is pushed on the stack.
 */
public Object visit(CLVFOr node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue a = stack.pop();

    if (a.type != TLValueType.BOOLEAN) {
        Object params[] = new Object[] { a };
        throw new TransformLangExecutorRuntimeException(node, params, "logical condition does not evaluate to BOOLEAN value");
    } else if (a == TLBooleanValue.TRUE) {
        // left operand TRUE -> short-circuit, result is TRUE
        stack.push(TLBooleanValue.TRUE);
        return data;
    }

    // left operand was FALSE - result is the right operand's value
    node.jjtGetChild(1).jjtAccept(this, data);
    a = stack.pop();

    if (a.type != TLValueType.BOOLEAN) {
        Object params[] = new Object[] { a };
        throw new TransformLangExecutorRuntimeException(node, params, "logical condition does not evaluate to BOOLEAN value");
    }

    stack.push(a == TLBooleanValue.TRUE ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
    return data;
}

/**
 * Logical AND with short-circuit evaluation: the right operand is evaluated
 * only when the left one is TRUE. Both operands must evaluate to BOOLEAN
 * TL values; the boolean result is pushed on the stack.
 */
public Object visit(CLVFAnd node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue a = stack.pop();

    if (a.type != TLValueType.BOOLEAN) {
        Object params[] = new Object[] { a };
        throw new TransformLangExecutorRuntimeException(node, params, "logical condition does not evaluate to BOOLEAN value");
    } else if (a == TLBooleanValue.FALSE) {
        // left operand FALSE -> short-circuit, result is FALSE
        stack.push(TLBooleanValue.FALSE);
        return data;
    }

    // left operand was TRUE - result is the right operand's value
    node.jjtGetChild(1).jjtAccept(this, data);
    a = stack.pop();

    if (a.type != TLValueType.BOOLEAN) {
        Object params[] = new Object[] { a };
        throw new TransformLangExecutorRuntimeException(node, params, "logical condition does not evaluate to BOOLEAN value");
    }

    stack.push(a == TLBooleanValue.TRUE ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
    return data;
}
/**
 * Evaluates a comparison expression and pushes the BOOLEAN result.
 * Three families of operators are handled:
 * <ul>
 *   <li>{@code REGEX_EQUAL} — left STRING matched against a pre-compiled regex;</li>
 *   <li>{@code IN_OPER} — membership test against a container value;</li>
 *   <li>all other operators — ordered comparison via {@code compareTo}, with a
 *       special equality-only path for BOOLEAN operands.</li>
 * </ul>
 * A NULL left operand compares as FALSE for {@code in} and {@code ==}.
 * Fix: the regex branch previously used an if/else to assign a boolean
 * ({@code if (matches) lValue = true; else lValue = false;}); it now assigns
 * the result of {@code matches()} directly — same behavior, clearer code.
 */
public Object visit(CLVFComparison node, Object data) {
    int cmpResult = 2;
    boolean lValue = false;
    TLValue a;
    TLValue b;
    switch(node.cmpType){
    case REGEX_EQUAL:
        // special handling for Regular expression
        node.jjtGetChild(0).jjtAccept(this, data);
        TLValue field1 = stack.pop();
        node.jjtGetChild(1).jjtAccept(this, data);
        TLValue field2 = stack.pop();
        if (field1.type == TLValueType.STRING
                && field2.getValue() instanceof Matcher) {
            Matcher regex = (Matcher) field2.getValue();
            regex.reset(((TLStringValue)field1).getCharSequence());
            lValue = regex.matches();
        } else {
            Object[] arguments = { field1, field2 };
            throw new TransformLangExecutorRuntimeException(node,
                    arguments, "regex equal - wrong type of literal(s)");
        }
        break;
    case IN_OPER:
        // membership test: right operand must be a container
        TLContainerValue list=null;
        node.jjtGetChild(0).jjtAccept(this, data);
        a = stack.pop();
        node.jjtGetChild(1).jjtAccept(this, data);
        b = stack.pop();
        try{
            list = (TLContainerValue)b;
        }catch(Exception ex){
            Object[] arguments = { a, b};
            throw new TransformLangExecutorRuntimeException(node,
                    arguments, "in - wrong type of literal(s)");
        }
        // SPECIAL handling of IN in case a is NULL
        if (a==TLNullValue.getInstance()){
            stack.push(TLBooleanValue.FALSE);
            return data;
        }
        try{
            lValue=list.contains(a);
        }catch(Exception ex){
            Object[] arguments = { a, b };
            throw new TransformLangExecutorRuntimeException(node,
                    arguments, "in - incompatible literals/expressions");
        }
        break;
    default:
        // other types of comparison
        node.jjtGetChild(0).jjtAccept(this, data);
        a = stack.pop();
        node.jjtGetChild(1).jjtAccept(this, data);
        b = stack.pop();
        if (!a.type.isCompatible(b.type)) {
            // SPECIAL handling of EQUAL in case a is NULL
            if (a==TLNullValue.getInstance() && node.cmpType==EQUAL){
                stack.push(TLBooleanValue.FALSE);
                return data;
            }
            Object arguments[] = { a, b };
            throw new TransformLangExecutorRuntimeException(node,
                    arguments,
                    "compare - incompatible literals/expressions");
        }
        switch (a.type) {
        case INTEGER:
        case LONG:
        case NUMBER:
        case DECIMAL:
        case DATE:
        case STRING:
        case LIST:
        case MAP:
        case RECORD:
            try{
                cmpResult = a.compareTo(b);
            }catch(Exception ex){
                Object arguments[] = { a, b };
                throw new TransformLangExecutorRuntimeException(node,
                        arguments,
                        "compare - error during comparison of literals/expressions");
            }
            break;
        case BOOLEAN:
            // booleans support equality operators only
            if (node.cmpType == EQUAL || node.cmpType == NON_EQUAL) {
                cmpResult = a.equals(b) ? 0 : -1;
            } else {
                Object arguments[] = { a, b };
                throw new TransformLangExecutorRuntimeException(node,
                        arguments,
                        "compare - unsupported comparison operator ["
                                + tokenImage[node.cmpType]
                                + "] for literals/expressions");
            }
            break;
        default:
            Object arguments[] = { a, b };
            throw new TransformLangExecutorRuntimeException(node,
                    arguments,
                    "compare - don't know how to compare literals/expressions");
        }
        // translate the three-way compare result into the requested relation
        switch (node.cmpType) {
        case EQUAL:
            if (cmpResult == 0) {
                lValue = true;
            }
            break;// equal
        case LESS_THAN:
            if (cmpResult == -1) {
                lValue = true;
            }
            break;// less than
        case GREATER_THAN:
            if (cmpResult == 1) {
                lValue = true;
            }
            break;// greater than
        case LESS_THAN_EQUAL:
            if (cmpResult <= 0) {
                lValue = true;
            }
            break;// less than equal
        case GREATER_THAN_EQUAL:
            if (cmpResult >= 0) {
                lValue = true;
            }
            break;// greater than equal
        case NON_EQUAL:
            if (cmpResult != 0) {
                lValue = true;
            }
            break;
        default:
            // this should never happen !!!
            logger
                    .fatal("Internal error: Unsupported comparison operator !");
            throw new RuntimeException(
                    "Internal error - Unsupported comparison operator !");
        }
    }
    stack.push(lValue ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
    return data;
}
/**
 * Evaluates the binary '+' operator. Supported operand combinations:
 * number + number (numeric addition), date + number (adds that many days),
 * and string + anything (concatenation). The result is stored in the node's
 * cached {@code nodeVal} holder, which is lazily created on first visit and
 * reused afterwards to avoid per-evaluation allocation.
 * NULL operands are rejected unless the other operand can seed the cache.
 */
public Object visit(CLVFAddNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue a = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    TLValue b = stack.pop();
    if (node.nodeVal==null) {
        // seed the reusable result holder from whichever operand is non-NULL
        if (a!=TLNullValue.getInstance()){
            node.nodeVal=a.duplicate();
        }else if (b!=TLNullValue.getInstance()){
            node.nodeVal=b.duplicate();
        }else{
            throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                    "add - NULL values not allowed");
        }
    }
    try {
        if (a.type.isNumeric() && b.type.isNumeric()) {
            node.nodeVal.setValue(a);
            ((TLNumericValue)node.nodeVal).add(((TLNumericValue)b).getNumeric());
            stack.push(node.nodeVal);
        } else if (a.type==TLValueType.DATE && b.type.isNumeric()) {
            // date + N adds N days to the date
            Calendar result = Calendar.getInstance();
            result.setTime(((TLDateValue)a).getDate());
            result.add(Calendar.DATE, ((TLNumericValue)b).getInt());
            ((TLDateValue)node.nodeVal).getDate().setTime(result.getTimeInMillis());
            stack.push(node.nodeVal);
        } else if (a.type==TLValueType.STRING) {
            // string concatenation reuses the cached StringBuilder in nodeVal
            //CharSequence a1 = ((TLStringValue)a).getCharSequence();
            StringBuilder buf=(StringBuilder)node.nodeVal.getValue();
            buf.setLength(0);
            buf.append(a.getValue());
            if (b.type==TLValueType.STRING) {
                buf.append(b.getValue());
            } else {
                buf.append(b);
            }
            stack.push(node.nodeVal);
        } else {
            Object[] arguments = { a, b };
            throw new TransformLangExecutorRuntimeException(node,arguments,
                    "add - wrong type of literal(s)");
        }
    } catch (ClassCastException ex) {
        Object arguments[] = { a, b };
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "add - wrong type of literal(s)");
    }
    return data;
}
/**
 * Evaluates the binary '-' operator. Supported operand combinations:
 * number - number (numeric subtraction) and date - number (subtracts that many
 * days). The right operand must always be numeric; NULL operands are rejected.
 * The result is stored in the node's cached {@code nodeVal} holder (lazily
 * created on first visit, reused afterwards).
 */
public Object visit(CLVFSubNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue a = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    TLValue b = stack.pop();
    if (a==TLNullValue.getInstance() || b==TLNullValue.getInstance()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                "sub - NULL value not allowed");
    }
    if (!b.type.isNumeric()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { b },
                "sub - wrong type of literal");
    }
    if (node.nodeVal==null) {
        node.nodeVal=a.duplicate();
    }
    if(a.type.isNumeric()) {
        node.nodeVal.setValue(a);
        ((TLNumericValue)node.nodeVal).sub(((TLNumericValue)b).getNumeric());
        stack.push(node.nodeVal);
    } else if (a.type==TLValueType.DATE) {
        // date - N subtracts N days (implemented as adding -N)
        Calendar result = Calendar.getInstance();
        result.setTime(((TLDateValue)a).getDate());
        result.add(Calendar.DATE, ((TLNumericValue)b).getInt() * -1);
        ((TLDateValue)node.nodeVal).getDate().setTime(result.getTimeInMillis());
        stack.push(node.nodeVal);
    } else {
        Object[] arguments = { a, b };
        throw new TransformLangExecutorRuntimeException(node,arguments,
                "sub - wrong type of literal(s)");
    }
    return data;
}
/**
 * Evaluates the binary '*' operator for numeric operands. NULL operands are
 * rejected. The result is stored in the node's cached {@code nodeVal} holder
 * (lazily created on first visit, reused afterwards).
 * Bug fix: the type check previously used {@code &&} so it only rejected when
 * BOTH operands were non-numeric; a single non-numeric operand fell through to
 * an uninformative {@code ClassCastException} on the cast below. It now rejects
 * when EITHER operand is non-numeric, matching the plural error message.
 */
public Object visit(CLVFMulNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue a = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    TLValue b = stack.pop();
    if (a==TLNullValue.getInstance()|| b==TLNullValue.getInstance()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                "mul - NULL value not allowed");
    }
    // reject if either operand is non-numeric (was '&&', which masked one bad operand)
    if (!a.type.isNumeric() || !b.type.isNumeric()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
                "mul - wrong type of literals");
    }
    if (node.nodeVal==null) {
        node.nodeVal=a.duplicate();
    }else{
        node.nodeVal.setValue(a);
    }
    ((TLNumericValue)node.nodeVal).mul(((TLNumericValue)b).getNumeric()); //TODO: hack due to IntegerDecimal problem..
    stack.push(node.nodeVal);
    return data;
}
/**
 * Evaluates the binary '/' operator for numeric operands. NULL operands are
 * rejected; division by zero surfaces as an ArithmeticException wrapped in a
 * runtime exception. The cached {@code nodeVal} holder is re-created whenever
 * its type no longer matches the left operand's type.
 * Bug fix: the type check previously used {@code &&} so it only rejected when
 * BOTH operands were non-numeric; a single non-numeric operand fell through to
 * the generic "error during operation" path. It now rejects when EITHER
 * operand is non-numeric, matching the plural error message.
 */
public Object visit(CLVFDivNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue a = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    TLValue b = stack.pop();
    if (a==TLNullValue.getInstance()|| b==TLNullValue.getInstance()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                "div - NULL value not allowed");
    }
    // reject if either operand is non-numeric (was '&&', which masked one bad operand)
    if (!a.type.isNumeric() || !b.type.isNumeric()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
                "div - wrong type of literals");
    }
    if (node.nodeVal==null || node.nodeVal.type!=a.type) {
        node.nodeVal=a.duplicate();
    }else{
        node.nodeVal.setValue(a);
    }
    try {
        ((TLNumericValue)node.nodeVal).div(((TLNumericValue)b).getNumeric()); //TODO: hack due to IntegerDecimal problem.
    }catch(ArithmeticException ex){
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
                "div - arithmetic exception",ex);
    }catch (Exception ex) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a,b },
                "div - error during operation",ex);
    }
    stack.push(node.nodeVal);
    return data;
}
/**
 * Evaluates the binary '%' (modulo) operator for numeric operands. NULL
 * operands are rejected. The result is stored in the node's cached
 * {@code nodeVal} holder (lazily created on first visit, reused afterwards).
 * Bug fix: the type check previously used {@code &&} so it only rejected when
 * BOTH operands were non-numeric; a single non-numeric operand fell through to
 * an uninformative {@code ClassCastException} on the cast below. It now rejects
 * when EITHER operand is non-numeric, matching the plural error message.
 */
public Object visit(CLVFModNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue a = stack.pop();
    node.jjtGetChild(1).jjtAccept(this, data);
    TLValue b = stack.pop();
    if (a==TLNullValue.getInstance()|| b==TLNullValue.getInstance()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                "mod - NULL value not allowed");
    }
    // reject if either operand is non-numeric (was '&&', which masked one bad operand)
    if (!a.type.isNumeric() || !b.type.isNumeric()) {
        throw new TransformLangExecutorRuntimeException(node, new Object[] { a, b },
                "mod - wrong type of literals");
    }
    if (node.nodeVal==null) {
        node.nodeVal=a.duplicate();
    }else{
        node.nodeVal.setValue(a);
    }
    ((TLNumericValue)node.nodeVal).mod(((TLNumericValue)b).getNumeric()); //TODO: hack due to IntegerDecimal problem.
    stack.push(node.nodeVal);
    return data;
}
/**
 * Implements isnull(): pushes TRUE when the operand is NULL, or when it is a
 * STRING of zero length (empty strings are treated the same as NULL).
 */
public Object visit(CLVFIsNullNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue operand = stack.pop();
    boolean isNull;
    if (operand == TLNullValue.getInstance()) {
        isNull = true;
    } else if (operand.type == TLValueType.STRING) {
        // an empty string counts as null
        isNull = ((TLStringValue) operand).getCharSequence().length() == 0;
    } else {
        isNull = false;
    }
    stack.push(isNull ? TLBooleanValue.TRUE : TLBooleanValue.FALSE);
    return data;
}
/**
 * Implements nvl(value, default): pushes the first operand unless it is NULL
 * or an empty STRING, in which case the second operand is evaluated and its
 * value is left on the stack instead.
 */
public Object visit(CLVFNVLNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue first = stack.pop();
    boolean useDefault = first == TLNullValue.getInstance()
            || (first.type == TLValueType.STRING && ((TLStringValue) first).length() == 0);
    if (useDefault) {
        // evaluating the default leaves it on the stack as the result
        node.jjtGetChild(1).jjtAccept(this, data);
    } else {
        stack.push(first);
    }
    return data;
}
/**
 * Implements nvl2(value, notNullResult, nullResult): evaluates child 2 when the
 * first operand is NULL or an empty STRING, child 1 otherwise. The chosen
 * branch leaves its value on the stack.
 */
public Object visit(CLVFNVL2Node node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue checked = stack.pop();
    boolean nullOrEmpty = checked == TLNullValue.getInstance()
            || (checked.type == TLValueType.STRING && ((TLStringValue) checked).length() == 0);
    node.jjtGetChild(nullOrEmpty ? 2 : 1).jjtAccept(this, data);
    return data;
}
/** Pushes this literal's pre-parsed constant value onto the evaluation stack. */
public Object visit(CLVFLiteral node, Object data) {
    stack.push(node.value);
    return data;
}
/**
 * Resolves a reference to an input record or one of its fields and pushes its
 * TL value. Handles: whole-record context ({@code fieldNo < 0}), dynamic field
 * access via a computed index, and direct field access. A NULL record or NULL
 * field pushes the NULL singleton. The converted value is cached on the node
 * ({@code node.value}) and reused on subsequent visits.
 *
 * @return the underlying DataRecord/DataField (or null for NULL values) so
 *         callers can perform extra checks in special cases
 */
public Object visit(CLVFInputFieldLiteral node, Object data) {
    if (inputRecords == null) {
        throw new TransformLangExecutorRuntimeException(node, "Cannot access input fields within this scope!");
    }
    DataRecord record = inputRecords[node.recordNo];
    int fieldNo=-1;
    if (record == NullRecord.NULL_RECORD || record == null) {
        // missing record -> NULL value, and no underlying object to return
        stack.push(TLNullValue.getInstance());
        return null;
    }
    if (node.indexSet){
        // field index is a runtime expression; evaluate it first
        node.childrenAccept(this, data);
        TLValue val=stack.pop();
        try{
            fieldNo=val.getNumeric().getInt();
        }catch(Exception ex){
            throw new TransformLangExecutorRuntimeException(node,new Object[] {val},"invalid field index");
        }
    }
    if (node.fieldNo < 0) { // record context
        if (node.value == null) {
            // first visit: create the cached value
            if (node.indexSet){
                try{
                    node.value = TLValue.convertValue(record.getField(fieldNo));
                }catch(Exception ex){
                    throw new TransformLangExecutorRuntimeException(node, "field index ("+fieldNo +") out of bounds");
                }
            }else{
                node.value = new TLRecordValue(record);
            }
        } else {
            // subsequent visits: refresh the cached value from the current record
            if (node.indexSet){
                try{
                    node.value = TLValue.convertValue(record.getField(fieldNo));
                }catch(Exception ex){
                    throw new TransformLangExecutorRuntimeException(node, "field index ("+fieldNo +") out of bounds");
                }
            }else{
                node.value.setValue(record);
            }
        }
        stack.push(node.value);
        // we return reference to DataRecord so we can
        // perform extra checking in special cases
        return record;
    } else {
        node.field = record.getField(node.fieldNo);
        if (node.field.isNull()) {
            stack.push(TLNullValue.getInstance());
            return null;
        }
        if (node.value == null || node.field.getMetadata().getType() == DataFieldMetadata.BOOLEAN_FIELD) {
            // since TLBooleanValue is immutable, we have to pass correct reference
            node.value = TLValue.convertValue(node.field);
        } else {
            node.value.setValue(node.field);
        }
        stack.push(node.value);
        // we return reference to DataField so we can
        // perform extra checking in special cases
        return node.field;
    }
}
/**
 * Output-field references are write-only targets: nothing is evaluated or
 * pushed here; assignment logic resolves output fields elsewhere.
 */
public Object visit(CLVFOutputFieldLiteral node, Object data) {
    //stack.push(inputRecords[node.recordNo].getField(node.fieldNo));
    // we return reference to DataField so we can
    // perform extra checking in special cases
    return data;
}
/** Pushes the literal's pre-compiled regex matcher wrapped as an opaque object value. */
public Object visit(CLVFRegexLiteral node, Object data) {
    stack.push(new TLObjectValue(node.matcher));
    return data;
}
/**
 * Unary minus: negates a numeric operand by multiplying a duplicate with -1,
 * so the original value object is left untouched. Non-numeric operands raise
 * a runtime exception.
 */
public Object visit(CLVFMinusNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue operand = stack.pop();
    if (!operand.type.isNumeric()) {
        Object[] arguments = { operand };
        throw new TransformLangExecutorRuntimeException(node, arguments,
                "minus - not a number");
    }
    // negate a copy so shared values are not mutated
    TLNumericValue negated = (TLNumericValue) operand.duplicate();
    negated.getNumeric().mul(Stack.NUM_MINUS_ONE_P);
    stack.push(negated);
    return data;
}
/**
 * Implements iif(condition, thenValue, elseValue): evaluates the condition
 * (which must be BOOLEAN), then evaluates exactly one branch; that branch
 * leaves its result on the stack.
 * Fix: removed a redundant {@code stack.push(stack.pop())} no-op that followed
 * the branch evaluation — the branch already leaves its value on the stack.
 */
public Object visit(CLVFIffNode node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue condition = stack.pop();
    if (condition.type != TLValueType.BOOLEAN) {
        Object[] arguments = { condition };
        throw new TransformLangExecutorRuntimeException(node, arguments,
                "iif - condition does not evaluate to BOOLEAN value");
    }
    // evaluate exactly one branch; its result stays on the stack
    node.jjtGetChild(condition == TLBooleanValue.TRUE ? 1 : 2).jjtAccept(this, data);
    return data;
}
/**
 * Implements print_err(message[, printLocation]): writes the message to
 * standard error. When the optional second BOOLEAN argument is TRUE, the
 * node's source line/column is appended. All children are evaluated first;
 * the optional flag sits on top of the stack, above the message.
 */
public Object visit(CLVFPrintErrNode node, Object data) {
    node.childrenAccept(this, data);
    boolean printLocationFlag = false;
    // interpret optional parameter
    if (node.jjtGetNumChildren() > 1) {
        // the flag was evaluated last, so it is popped before the message
        TLValue printLocation = stack.pop();
        if (printLocation.type != TLValueType.BOOLEAN) {
            throw new TransformLangExecutorRuntimeException(node,new Object[]{printLocation},
                    "print_err - the second argument does not evaluate to a BOOLEAN value");
        }
        printLocationFlag = (Boolean)printLocation.getValue();
    }
    TLValue message = stack.pop();
    if (printLocationFlag) {
        // append the script location of this print_err call
        StringBuilder buf=new StringBuilder((message != null ? message.toString() : "<null>"));
        buf.append(" (on line: ").append(node.getLineNumber());
        buf.append(" col: ").append(node.getColumnNumber()).append(")");
        System.err.println(buf);
    }else{
        System.err.println(message != null ? message : "<null>");
    }
    return data;
}
/**
 * Debugging aid: dumps the evaluation stack contents (top first) and all local
 * variables.
 * NOTE(review): the stack dump goes to stderr while the local-variable listing
 * goes to stdout — presumably unintentional, but preserved here; confirm before
 * unifying the streams.
 */
public Object visit(CLVFPrintStackNode node, Object data) {
    for (int i=stack.top;i>=0;i--){
        System.err.println("["+i+"] : "+stack.stack[i]);
    }
    System.out.println("** list of local variables ***");
    for (int i=0;i<stack.localVarCounter;i++)
        System.out.println(stack.localVarSlot[stack.localVarSlotOffset+i]);
    return data;
}
/***************************************************************************
* Transformation Language executor starts here.
**************************************************************************/
/**
 * Executes a C-style for loop. Children: 0 = initializer, 1 = condition,
 * 2 = increment, 3 = optional body (an empty placeholder node is substituted
 * when the body is missing). The condition must evaluate to BOOLEAN. After each
 * body/increment evaluation the leftover stack value is popped. A raised
 * breakFlag stops the loop; BREAK/RETURN propagate out, CONTINUE proceeds to
 * the increment.
 */
public Object visit(CLVFForStatement node, Object data) {
    node.jjtGetChild(0).jjtAccept(this, data); // set up of the loop
    boolean condition = false;
    Node loopCondition = node.jjtGetChild(1);
    Node increment = node.jjtGetChild(2);
    Node body;
    try{
        body=node.jjtGetChild(3);
    }catch(ArrayIndexOutOfBoundsException ex){
        // loop has no body; use the shared empty node
        body=emptyNode;
    }
    loopCondition.jjtAccept(this, data); // evaluate the condition
    TLValue conVal=stack.pop();
    try{
        if (conVal.type!=TLValueType.BOOLEAN)
            throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
        condition = (conVal==TLBooleanValue.TRUE);
    }catch (NullPointerException ex){
        throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
    }
    // loop execution
    while (condition) {
        body.jjtAccept(this, data);
        stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag){
            breakFlag=false;
            if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
                return data;
            }
            // BREAK_CONTINUE falls through to the increment
        }
        increment.jjtAccept(this, data);
        stack.pop(); // in case there is anything on top of stack
        // evaluate the condition
        loopCondition.jjtAccept(this, data);
        condition = (stack.pop()==TLBooleanValue.TRUE);
    }
    return data;
}
/**
 * Executes a foreach loop over a container variable. Children: 0 = loop
 * variable, 1 = container variable (LIST/RECORD/BYTE iterated by index, MAP
 * iterated over its value collection), 2 = optional body. Each element is
 * assigned to the loop variable before the body runs. breakFlag handling
 * mirrors the other loops: BREAK/RETURN exit, CONTINUE moves to next element.
 */
public Object visit(CLVFForeachStatement node, Object data) {
    CLVFVariableLiteral varNode=(CLVFVariableLiteral)node.jjtGetChild(0);
    CLVFVariableLiteral arrayNode=(CLVFVariableLiteral)node.jjtGetChild(1);
    TLVariable variableToAssign = stack.getVar(varNode.localVar,
            varNode.varSlot);
    TLVariable arrayVariable=stack.getVar(arrayNode.localVar,
            arrayNode.varSlot);
    Node body;
    try{
        body=node.jjtGetChild(2);
    }catch(ArrayIndexOutOfBoundsException ex){
        // loop has no body; use the shared empty node
        body=emptyNode;
    }
    switch(arrayVariable.getType()) {
    case LIST:
    case RECORD:
    case BYTE:
        // index-addressable containers share one iteration path
        TLContainerValue container=(TLContainerValue)arrayVariable.getTLValue();
        for(int i=0; i<container.getLength();i++) {
            variableToAssign.setTLValue(container.getStoredValue(i));
            body.jjtAccept(this, data);
            stack.pop(); // in case there is anything on top of stack
            // check for break or continue statements
            if (breakFlag){
                breakFlag=false;
                if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
                    return data;
                }
            }
        }
        break;
    case MAP:
        // maps iterate over their stored values, not keys
        Iterator<TLValue> iter = ((TLContainerValue)arrayVariable.getTLValue()).getCollection().iterator();
        while(iter.hasNext()) {
            variableToAssign.setTLValue(iter.next());
            body.jjtAccept(this, data);
            stack.pop(); // in case there is anything on top of stack
            // check for break or continue statements
            if (breakFlag){
                breakFlag=false;
                if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) {
                    return data;
                }
            }
        }
        break;
    default:
        throw new TransformLangExecutorRuntimeException(node,"not a Map/List/Record/ByteArray variable");
    }
    return data;
}
/**
 * Executes a while loop. Children: 0 = condition (must evaluate to BOOLEAN),
 * 1 = optional body. The condition is checked before each iteration; after
 * each body run the leftover stack value is popped. breakFlag handling:
 * BREAK/RETURN exit the loop, CONTINUE re-evaluates the condition.
 */
public Object visit(CLVFWhileStatement node, Object data) {
    boolean condition = false;
    Node loopCondition = node.jjtGetChild(0);
    Node body;
    try{
        body=node.jjtGetChild(1);
    }catch(ArrayIndexOutOfBoundsException ex){
        // loop has no body; use the shared empty node
        body=emptyNode;
    }
    loopCondition.jjtAccept(this, data); // evaluate the condition
    TLValue conVal=stack.pop();
    try{
        if (conVal.type!=TLValueType.BOOLEAN)
            throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
        condition = (conVal==TLBooleanValue.TRUE);
    }catch (NullPointerException ex){
        throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
    }
    // loop execution
    while (condition) {
        body.jjtAccept(this, data);
        stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag){
            breakFlag=false;
            if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
        }
        // evaluate the condition
        loopCondition.jjtAccept(this, data);
        condition = (stack.pop()==TLBooleanValue.TRUE);
    }
    return data;
}
/**
 * Executes an if/else statement. Children: 0 = condition (must evaluate to
 * BOOLEAN), 1 = then-branch, 2 = optional else-branch.
 */
public Object visit(CLVFIfStatement node, Object data) {
    boolean condition = false;
    node.jjtGetChild(0).jjtAccept(this, data); // evaluate the
    TLValue conVal=stack.pop();
    try{
        if (conVal.type!=TLValueType.BOOLEAN)
            throw new TransformLangExecutorRuntimeException(node,"if condition does not evaluate to BOOLEAN value");
        condition = (conVal==TLBooleanValue.TRUE);
    } catch (NullPointerException ex){
        throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
    }
    // first if
    if (condition) {
        node.jjtGetChild(1).jjtAccept(this, data);
    } else { // if else part exists
        if (node.jjtGetNumChildren() > 2) {
            node.jjtGetChild(2).jjtAccept(this, data);
        }
    }
    return data;
}
/**
 * Executes a do-while loop. Children: 0 = body, 1 = condition (must evaluate
 * to BOOLEAN). The body always runs at least once; the condition is checked
 * after each iteration. BREAK/RETURN exit the loop, CONTINUE re-checks the
 * condition.
 */
public Object visit(CLVFDoStatement node, Object data) {
    boolean condition = false;
    Node loopCondition = node.jjtGetChild(1);
    Node body = node.jjtGetChild(0);
    // loop execution
    do {
        body.jjtAccept(this, data);
        stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag){
            breakFlag=false;
            if (breakType==BREAK_BREAK || breakType==BREAK_RETURN) return data;
        }
        // evaluate the condition
        loopCondition.jjtAccept(this, data);
        TLValue conVal=stack.pop();
        try{
            if (conVal.type!=TLValueType.BOOLEAN)
                throw new TransformLangExecutorRuntimeException(node,"loop condition does not evaluate to BOOLEAN value");
            condition = (conVal==TLBooleanValue.TRUE);
        }catch (NullPointerException ex){
            throw new TransformLangExecutorRuntimeException(node,"missing or invalid condition");
        }
    } while (condition);
    return data;
}
/**
 * Executes a switch statement. Child 0 is the switch expression; the remaining
 * children are case clauses, with an optional default clause last. The switch
 * value is re-pushed before each case so the CLVFCaseExpression visitor can pop
 * and compare it. Note: without a break, multiple cases may match (fall-through
 * across matching cases). The default clause runs only when no case matched.
 */
public Object visit(CLVFSwitchStatement node, Object data) {
    // get value of switch && push/leave it on stack
    boolean match=false;
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue switchVal=stack.pop();
    int numChildren = node.jjtGetNumChildren();
    int numCases = node.hasDefaultClause ? numChildren-1 : numChildren;
    // loop over remaining case statements
    for (int i = 1; i < numCases; i++) {
        stack.push(switchVal);
        // CLVFCaseExpression returns TRUE when its literal matched and its body ran
        if (node.jjtGetChild(i).jjtAccept(this, data)==TLBooleanValue.TRUE){
            match=true;
        }
        if (breakFlag) {
            if (breakType == BREAK_BREAK) {
                breakFlag = false;
            }
            break;
        }
    }
    // test whether execute default branch
    if (node.hasDefaultClause && !match){
        node.jjtGetChild(numChildren-1).jjtAccept(this, data);
    }
    return data;
}
/**
 * Evaluates one case clause of a switch. Pops the switch value (pushed by the
 * switch visitor), compares it with the case literal (child 0), and when equal
 * executes the case body (child 1).
 *
 * @return TLBooleanValue.TRUE when the case matched and its body ran,
 *         TLBooleanValue.FALSE otherwise
 */
public Object visit(CLVFCaseExpression node, Object data) {
    // test if literal (as child 0) is equal to data on stack
    // if so, execute block (child 1)
    boolean match = false;
    TLValue switchVal = stack.pop();
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue value = stack.pop();
    try {
        match=(switchVal.compareTo(value)==0);
    } catch (ClassCastException ex) {
        Object[] args=new Object[] {switchVal,value};
        throw new TransformLangExecutorRuntimeException(node,args,"incompatible literals in case clause");
    }catch (NullPointerException ex){
        throw new TransformLangExecutorRuntimeException(node,"missing or invalid case value");
    }catch (IllegalArgumentException ex){
        Object[] args=new Object[] {switchVal,value};
        throw new TransformLangExecutorRuntimeException(node,args,"incompatible literals in case clause");
    }
    if (match){
        node.jjtGetChild(1).jjtAccept(this, data);
        return TLBooleanValue.TRUE;
    }
    return TLBooleanValue.FALSE;
}
/**
 * Executes a try/catch statement. Child 0 is the try block. With three
 * children, child 1 is a STRING variable that receives the exception class
 * name (the cause's class when present) and child 2 is the catch block; with
 * two children, child 1 is the catch block and no variable is populated.
 */
public Object visit(CLVFTryCatchStatement node, Object data) {
    try {
        node.jjtGetChild(0).jjtAccept(this, data); // evaluate the
    } catch (Exception ex) {
        if (node.jjtGetNumChildren() > 2) {
            // populate chosen variable with exception name
            CLVFVariableLiteral varLit = (CLVFVariableLiteral) node.jjtGetChild(1);
            TLVariable var = stack.getVar(varLit.localVar, varLit.varSlot);
            if (var.getType() != TLValueType.STRING) {
                throw new TransformLangExecutorRuntimeException(node, "variable \"" + var.getName() + "\" is not of type string in catch() block");
            }
            // prefer the root cause's class name when the exception is wrapped
            var.getTLValue().setValue(ex.getCause()== null ? ex.getClass().getName() : ex.getCause().getClass().getName());
            // call the catch block when variable is present
            node.jjtGetChild(2).jjtAccept(this, data);
        } else {
            // call the catch block - simple variant
            node.jjtGetChild(1).jjtAccept(this, data);
        }
    }
    return data;
}
/**
 * Executes ++/-- on a variable. Numeric variables are adjusted by +/-1 in
 * place; DATE variables are shifted by +/-1 day using the executor's shared
 * calendar. Any other variable type raises a runtime exception.
 */
public Object visit(CLVFIncrDecrStatement node, Object data) {
    Node childNode = node.jjtGetChild(0);
    CLVFVariableLiteral varNode=(CLVFVariableLiteral) childNode;
    TLVariable var=stack.getVar(varNode.localVar, varNode.varSlot);
    if (var.getType().isNumeric()) {
        ((TLNumericValue)var.getTLValue()).getNumeric().add( node.kind==INCR ? Stack.NUM_ONE_P : Stack.NUM_MINUS_ONE_P);
    }else if (var.getType()==TLValueType.DATE) {
        // dates increment/decrement by one day
        stack.calendar.setTime(((TLDateValue)var.getTLValue()).getDate());
        stack.calendar.add(Calendar.DATE, node.kind == INCR ? 1 : -1);
        var.getTLValue().setValue(stack.calendar.getTime());
    }else {
        throw new TransformLangExecutorRuntimeException(node,"variable is not of numeric or date type");
    }
    return data;
}
/**
 * Executes a statement block: runs each child statement in order, popping the
 * value every statement leaves on the stack. Execution stops early when a
 * break/continue/return flag has been raised by a nested statement; a return
 * keeps its result on the stack, break/continue discard theirs.
 */
public Object visit(CLVFBlock node, Object data) {
    final int statementCount = node.jjtGetNumChildren();
    for (int i = 0; i < statementCount; i++) {
        node.jjtGetChild(i).jjtAccept(this, data);
        // have we seen a continue/break/return statement?
        if (breakFlag) {
            if (breakType != BREAK_RETURN) {
                stack.pop();
            }
            return data;
        }
        stack.pop(); // discard the statement's leftover value
    }
    return data;
}
/*
* Loop & block & function control nodes
*/
/** Raises the break flag so enclosing loops/blocks unwind with BREAK semantics. */
public Object visit(CLVFBreakStatement node, Object data) {
    breakType = BREAK_BREAK;
    breakFlag = true; // signal the enclosing construct
    return data;
}
/** Raises the break flag so the enclosing loop skips to its next iteration. */
public Object visit(CLVFContinueStatement node, Object data) {
    breakType = BREAK_CONTINUE;
    breakFlag = true; // signal the enclosing construct
    return data;
}
/**
 * Executes a return statement: evaluates the optional return expression (its
 * value stays on the stack for the caller) and raises the break flag with
 * RETURN semantics so enclosing constructs unwind.
 */
public Object visit(CLVFReturnStatement node, Object data) {
    if (node.jjtHasChildren()) {
        // the return value remains on the stack
        node.jjtGetChild(0).jjtAccept(this, data);
    }
    breakType = BREAK_RETURN;
    breakFlag = true;
    return data;
}
/**
 * Debugging aid: lists all global and local variables. The section headers go
 * to stderr while the variable values go to stdout, preserving the original
 * output streams exactly.
 */
public Object visit(CLVFBreakpointNode node, Object data) {
    // list all variables
    System.err.println("** list of global variables ***");
    for (int i = 0; i < stack.globalVarSlot.length; i++) {
        System.out.println(stack.globalVarSlot[i]);
    }
    System.err.println("** list of local variables ***");
    for (int i = 0; i < stack.localVarCounter; i++) {
        System.out.println(stack.localVarSlot[stack.localVarSlotOffset + i]);
    }
    return data;
}
/*
* Variable declarations
*/
/**
 * Executes a variable declaration: allocates a fresh TL value of the declared
 * type (honoring length/precision for decimals, initial capacity for byte
 * arrays/lists/maps, and metadata lookup for records), stores it in the
 * global/local variable slot, and applies the optional initializer after a
 * type-compatibility check.
 */
public Object visit(CLVFVarDeclaration node, Object data) {
    TLValue value=null;
    // create global/local variable
    switch (node.type) {
    case INT_VAR:
        value = new TLNumericValue(TLValueType.INTEGER);
        break;
    case LONG_VAR:
        value = new TLNumericValue(TLValueType.LONG);
        break;
    case DOUBLE_VAR:
        value = new TLNumericValue(TLValueType.NUMBER);
        break;
    case DECIMAL_VAR:
    {
        // length/precision come from the declaration, defaulting when absent
        if (node.length > 0) {
            if (node.precision > 0) {
                value = new TLNumericValue(TLValueType.DECIMAL, DecimalFactory.getDecimal(node.length,
                        node.precision));
            } else {
                value = new TLNumericValue(TLValueType.DECIMAL,DecimalFactory.getDecimal(node.length, 0));
            }
        } else {
            value = new TLNumericValue(TLValueType.DECIMAL,DecimalFactory.getDecimal());
        }
    }
        break;
    case STRING_VAR:
        value = new TLStringValue();
        break;
    case DATE_VAR:
        value = new TLDateValue();
        break;
    case BOOLEAN_VAR:
        value = TLBooleanValue.getInstance(false);
        break;
    case BYTE_VAR:
    {
        if (node.length>0) {
            value = new TLByteArrayValue(node.length);
        }else {
            value = new TLByteArrayValue();
        }
    }
        break;
    case LIST_VAR:
    {
        if (node.length>0) {
            // pre-sized lists are padded with NULLs so all slots are addressable
            value = new TLListValue(node.length);
            ((TLListValue)value).fill(TLNullValue.getInstance(), node.length);
        }else {
            value = new TLListValue();
        }
    }
        break;
    case MAP_VAR:
    {
        if (node.length>0){
            value = new TLMapValue(node.length);
        }else {
            value = new TLMapValue();
        }
    }
        break;
    case RECORD_VAR:
        // record metadata comes either from an input port or from the graph by ID
        DataRecordMetadata metadata = null;
        if (node.recordNo >= 0) {
            metadata = parser.getInRecordMeta(node.recordNo);
        } else {
            try {
                metadata = graph.getDataRecordMetadata(node.metadataId, true);
            } catch (Exception ex) {
                throw new TransformLangExecutorRuntimeException(node, "error in Record declaration", ex);
            }
        }
        if (metadata == null) {
            throw new TransformLangExecutorRuntimeException(node, "record variable declaration - " + "can't find metadata ID \"" + (node.metadataId != null ? node.metadataId : "<unknown ID>") + "\"");
        }
        value = new TLRecordValue(metadata);
        break;
    default:
        throw new TransformLangExecutorRuntimeException(node,
                "variable declaration - "
                        + "unknown type for variable \""
                        + node.name + "\"");
    }
    TLVariable variable=new TLVariable(node.name,value);
    stack.storeVar(node.localVar, node.varSlot,variable );
    if (node.hasInitValue) {
        // can have spec node & initialization
        node.jjtGetChild(node.jjtGetNumChildren()>1 ? 1 : 0).jjtAccept(this, data);
        TLValue initValue = stack.pop();
        TLValueType type =variable.getType();
        if (type.isCompatible(initValue.type)) {
            variable.setTLValue(initValue);
        }else {
            throw new TransformLangExecutorRuntimeException(node,
                    "invalid assignment of \"" + initValue
                            + "\" ("+initValue.type +")to variable \"" + node.name
                            + "\" ("+type +")- incompatible data types");
        }
    }
    return data;
}
/**
 * Resolves a variable reference and pushes its value. Without an index the
 * whole variable value is pushed. With an index, the element is fetched from
 * the container: LIST/BYTE by numeric index, MAP by key value, RECORD by a
 * named field (resolved once and cached in {@code node.arrayIndex}) or by a
 * computed index expression.
 *
 * @return the resolved TLVariable so callers can perform extra checks
 */
public Object visit(CLVFVariableLiteral node, Object data) {
    TLVariable var = stack.getVar(node.localVar, node.varSlot);
    TLValue index = null;
    if (node.indexSet) {
        try {
            switch (var.getType()) {
            case LIST:
                node.jjtGetChild(0).jjtAccept(this, data);
                index = stack.pop();
                stack.push(((TLContainerValue)var.getTLValue()).getStoredValue(((TLNumericValue)index).getInt()));
                break;
            case MAP:
                node.jjtGetChild(0).jjtAccept(this, data);
                index = stack.pop();
                stack.push(((TLContainerValue)var.getTLValue()).getStoredValue(index));
                break;
            case RECORD:
                if (node.fieldID != null) {
                    // field name is resolved to a position once, then cached on the node
                    if (node.arrayIndex == -1) {
                        node.arrayIndex = ((DataRecord) var
                                .getTLValue().getValue()).getMetadata()
                                .getFieldPosition(node.fieldID);
                        if (node.arrayIndex==-1) {
                            throw new TransformLangExecutorRuntimeException(
                                    node, "invalid field ID \""
                                            + node.fieldID
                                            + "\" of variable \""
                                            + var.getName() + "\" - type "
                                            + var.getType().toString());
                        }
                    }
                    stack.push(((TLContainerValue) var.getTLValue())
                            .getStoredValue(node.arrayIndex));
                } else {
                    node.jjtGetChild(0).jjtAccept(this, data);
                    index = stack.pop();
                    stack.push(((TLContainerValue) var.getTLValue())
                            .getStoredValue(index));
                }
                break;
            case BYTE:
                node.jjtGetChild(0).jjtAccept(this, data);
                index = stack.pop();
                stack.push(((TLContainerValue)var.getTLValue()).getStoredValue(((TLNumericValue)index).getInt()));
                break;
            default:
                throw new TransformLangExecutorRuntimeException(node,"invalid usage if index for variable \""
                        + var.getName() + "\" - type "
                        + var.getType().toString());
            }
        }catch (TransformLangExecutorRuntimeException ex1){
            // already carries node context; re-throw untouched
            throw ex1;
        }catch (Exception ex) {
            throw new TransformLangExecutorRuntimeException(node,
                    "invalid index \"" + index + "\" of variable \""
                            + var.getName() + "\" - type "
                            + var.getType().toString(), ex);
        }
    }else {
        stack.push(var.getTLValue());
    }
    return var;
}
public Object visit(CLVFAssignment node, Object data) {
CLVFVariableLiteral varNode = (CLVFVariableLiteral) node.jjtGetChild(0);
TLVariable variableToAssign = stack.getVar(varNode.localVar,
varNode.varSlot);
node.jjtGetChild(1).jjtAccept(this, data);
TLValue valueToAssign = stack.pop();
if (valueToAssign==null) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of null value to variable \"" + varNode.varName+"\"");
}
int actualType = varNode.varType;
/*
* Function parameters are of type OBJECT. This is determined in compile time
* However if the function parameter is passing a value of data record,
* map or list, it will never by assigned correctly.
* Therefore we have to determine the type dynamically in runtime.
*/
if (actualType == OBJECT_VAR) {
TLValueType paramType = variableToAssign.getType(); // retrieve actual type
switch (paramType) {
case RECORD:
actualType = RECORD_VAR;
break;
case MAP:
actualType = MAP_VAR;
break;
case LIST:
actualType = LIST_VAR;
break;
}
}
switch (actualType) {
case LIST_VAR:
TLNumericValue index2List = null;
if (varNode.scalarContext) {
try {
if (varNode.indexSet) {
varNode.jjtGetChild(0).jjtAccept(this, data);
index2List = (TLNumericValue)stack.pop();
variableToAssign
.setTLValue(index2List.getInt(), valueToAssign);
} else {
variableToAssign.setTLValue(-1, valueToAssign);
}
} catch (IndexOutOfBoundsException ex) {
throw new TransformLangExecutorRuntimeException(
node,
"index \""
+ index2List
+ "\" is outside current limits of list/array: \""
+ varNode.varName + "\"", ex);
} catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + valueToAssign
+ "\" to variable \"" + varNode.varName
+ "\"", ex);
}
} else {
// list context
if (valueToAssign.type.isArray() ) {
variableToAssign.setTLValue(valueToAssign);
} else {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of value \""+valueToAssign+"\" to list/array \""
+ varNode.varName + "\"");
}
}
break;
case RECORD_VAR:
TLValue fieldIndex = null;
if (varNode.scalarContext) {
try {
if (varNode.fieldID != null) {
if (varNode.arrayIndex == -1) {
varNode.arrayIndex = ((DataRecord) variableToAssign
.getTLValue().getValue()).getMetadata()
.getFieldPosition(varNode.fieldID);
// check if the referenced field exists and we were able to resolve it
if (varNode.arrayIndex == -1) {
final String fieldName = varNode.fieldID == null ? "null" : varNode.fieldID;
throw new TransformLangExecutorRuntimeException(node,
"referenced field \"" + fieldName + "\" does not exist");
}
}
((TLContainerValue) variableToAssign.getTLValue())
.setStoredValue(varNode.arrayIndex,
valueToAssign);
} else {
varNode.jjtGetChild(0).jjtAccept(this, data);
fieldIndex = stack.pop();
((TLContainerValue) variableToAssign.getTLValue())
.setStoredValue(fieldIndex, valueToAssign);
}
} catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + valueToAssign
+ "\" to variable \"" + varNode.varName
+ "\"", ex);
}
} else {
try {
variableToAssign.getTLValue().setValue(valueToAssign);
} catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + valueToAssign
+ "\" to variable \"" + varNode.varName
+ "\"", ex);
}
}
break;
case MAP_VAR:
TLValue indexMap=null;
if (varNode.scalarContext) {
if (varNode.indexSet) {
try {
varNode.jjtGetChild(0).jjtAccept(this, data);
indexMap = stack.pop();
((TLContainerValue)variableToAssign.getTLValue()).setStoredValue(indexMap,valueToAssign);
} catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + valueToAssign
+ "\" to variable \"" + varNode.varName
+ "\"", ex);
}
} else {
if (valueToAssign instanceof TLContainerValue)
((TLContainerValue)variableToAssign.getTLValue()).setValue(valueToAssign);
// no key specified,
else throw new TransformLangExecutorRuntimeException(node,
"no key defined when assigning to Map variable \"" + varNode.varName);
}
} else {
try {
variableToAssign.getTLValue().setValue(valueToAssign);
} catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + valueToAssign
+ "\" to variable \"" + varNode.varName
+ "\"", ex);
}
}
break;
case BYTE_VAR:
TLNumericValue indexByteArray = null;
if (varNode.scalarContext) {
try {
// scalar context
if (varNode.indexSet) {
varNode.jjtGetChild(0).jjtAccept(this, data);
indexByteArray = (TLNumericValue)stack.pop();
((TLContainerValue)variableToAssign.getTLValue())
.setStoredValue(indexByteArray.getInt(), valueToAssign);
} else {
((TLContainerValue)variableToAssign.getTLValue()).setStoredValue(-1, valueToAssign);
}
} catch (IndexOutOfBoundsException ex) {
throw new TransformLangExecutorRuntimeException(
node,
"index \""
+ indexByteArray
+ "\" is outside current limits byte array \""
+ varNode.varName + "\"", ex);
} catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + valueToAssign
+ "\" to variable \"" + varNode.varName
+ "\"", ex);
}
} else {
// list context
if (valueToAssign.type.isArray() || valueToAssign==TLNullValue.getInstance()) {
variableToAssign.setTLValue(valueToAssign);
} else {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of scalar value to byte array \""
+ varNode.varName + "\"");
}
}
break;
default:
TLValueType type=variableToAssign.getType();
if (type.isCompatible(valueToAssign.type)) {
try{
variableToAssign.setTLValue(valueToAssign);
}catch(Exception ex){
throw new TransformLangExecutorRuntimeException(node,"invalid assignment of \"" + valueToAssign.toString()
+ "\" [" + valueToAssign.type
+ "] to variable \""
+ variableToAssign.getName() + "\" ["
+ variableToAssign.getType()
+ "] \" - "+ex.getMessage(),ex);
}
} else {
throw new TransformLangExecutorRuntimeException(node,
"invalid assignment of \"" + valueToAssign.toString()
+ "\" [" + valueToAssign.type
+ "] to variable \""
+ variableToAssign.getName() + "\" ["
+ variableToAssign.getType()
+ "] \" - incompatible data types");
}
}
return data;
}
/**
 * Maps a source expression onto one output record field, according to the node's
 * mapping type:
 * <ul>
 * <li>MultipleLiteral2Field - several alternative literals; each is tried in order
 * and the first one that can be assigned wins,</li>
 * <li>Field2Field - direct copy from an input field,</li>
 * <li>Literal2Field - a single evaluated expression.</li>
 * </ul>
 * BadDataFormatException is translated into a user-readable runtime error that
 * distinguishes the NULL-into-non-nullable case from general bad data.
 */
public Object visit(CLVFDirectMapping node, Object data) {
DataField field = outputRecords[node.recordNo].getField(node.fieldNo);
TLValue value = null;
switch (node.mappingType) {
case MultipleLiteral2Field:
final int arity = node.arity;
try {
// we try till success or no more options
for (int i = 0; i < arity; i++) {
node.jjtGetChild(i).jjtAccept(this, data);
value = stack.pop();
try {
value.copyToDataField(field);
break; // success during assignment, finish looping
} catch (Exception ex) {
// only the failure of the last alternative is fatal
if (i == arity - 1)
throw ex;
}
}
} catch (BadDataFormatException ex) {
if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable()) {
throw new TransformLangExecutorRuntimeException(node, "can't assign NULL to \"" + node.fieldName + "\"");
}
throw new TransformLangExecutorRuntimeException(node, "bad data when mapping field \"" + node.fieldName + "\" (" + field.getMetadata().getName() + ":" + field.getMetadata().getTypeAsString() + ") - assigning \"" + value + "\" (" + value.type + ")");
} catch (TransformLangExecutorRuntimeException ex) {
throw ex;
} catch (Exception ex) {
String msg = ex.getMessage();
throw new TransformLangExecutorRuntimeException(node, (msg != null ? msg : "") + " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType()) + ") - assigning \"" + value + "\" (" + (value != null ? value.getType().getName() : "unknown type") + ")");
}
break;
case Field2Field:
try {
// bind lazily to the concrete input field, then copy its value
CLVFInputFieldLiteral childNode=((CLVFInputFieldLiteral)node.jjtGetChild(0));
childNode.bindToField(inputRecords);
node.srcField=childNode.field;
field.setValue(node.srcField);
} catch (BadDataFormatException ex) {
if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable() && node.srcField.isNull()) {
throw new TransformLangExecutorRuntimeException(node, "can't assign NULL to \"" + node.fieldName + "\"");
}else{
throw new TransformLangExecutorRuntimeException(node, "bad data when mapping field \"" + node.fieldName + "\" (" + field.getMetadata().getName() + ":" + field.getMetadata().getTypeAsString() + ") - assigning \"" + node.srcField.toString() +
"\" (" + node.srcField.getMetadata().getName() + ":" + node.srcField.getMetadata().getTypeAsString() +" )");
}
} catch (Exception ex) {
String msg = ex.getMessage();
throw new TransformLangExecutorRuntimeException(node, (msg != null ? msg : "") + " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType()) + ") - assigning \"" + value + "\" (" + (value != null ? value.getType().getName() : "unknown type") + ")");
}
break;
case Literal2Field:
try {
// evaluate the single expression and assign the result
node.jjtGetChild(0).jjtAccept(this, data);
value = stack.pop();
value.copyToDataField(field);
} catch (BadDataFormatException ex) {
if (!outputRecords[node.recordNo].getField(node.fieldNo).getMetadata().isNullable()) {
throw new TransformLangExecutorRuntimeException(node, "can't assign NULL to \"" + node.fieldName + "\"",ex);
}
throw new TransformLangExecutorRuntimeException(node, "bad data when mapping field \"" + node.fieldName + "\" (" + field.getMetadata().getName() + ":" + field.getMetadata().getTypeAsString() + ") - assigning \"" + value + "\" (" + value.type + ")",ex);
} catch (TransformLangExecutorRuntimeException ex) {
throw ex;
} catch (Exception ex) {
String msg = ex.getMessage();
throw new TransformLangExecutorRuntimeException(node, (msg != null ? msg : "") + " when mapping \"" + node.fieldName + "\" (" + DataFieldMetadata.type2Str(field.getType()) + ") - assigning \"" + value + "\" (" + (value != null ? value.getType().getName() : "unknown type") + ")");
}
break;
default:
// this should not happen
throw new TransformLangExecutorRuntimeException(node, "unrecognized mapping type (internal error)");
}
return data;
}
/**
 * Executes a wildcard (star) mapping by delegating to the node's custom record
 * transformation. The transformation is lazily initialized (logger + in/out
 * metadata) on first use; any failure is re-thrown as a runtime error with the
 * original exception preserved as the cause.
 */
public Object visit(CLVFWildCardMapping node, Object data) {
if (!node.initialized) {
try {
node.custTrans.setLogger(logger);
node.custTrans.init(null, parser.getInRecordMetadata(), parser
.getOutRecordMetadata());
} catch (ComponentNotReadyException ex) {
throw new TransformLangExecutorRuntimeException(node,ex.getMessage(),ex);
}
node.initialized = true;
}
try {
node.custTrans.transform(inputRecords, outputRecords);
} catch (Exception ex) {
throw new TransformLangExecutorRuntimeException(node,ex.getMessage(),ex);
}
return data;
}
/*
* Declaration & calling of Functions here
*/
/**
 * Executes a function call. External (library) functions receive their
 * arguments popped off the stack and are invoked directly; internal
 * (CTL-declared) functions get a fresh call frame, their arguments stored as
 * local variables, and their body interpreted statement by statement until a
 * return (BREAK_RETURN) is hit.
 *
 * Fix: the parameter-storing loop was written as an empty-bodied {@code for}
 * with the side effect hidden in the update clause; rewritten as a plain loop
 * with identical semantics.
 */
public Object visit(CLVFFunctionCallStatement node, Object data) {
    // EXTERNAL FUNCTION
    if (node.externalFunction != null) {
        // put call parameters on stack
        node.childrenAccept(this, data);
        // convert stack content into values and delegate to the library implementation
        try {
            node.context.setGraph(this.graph);
            TLValue returnVal = node.externalFunction.execute(stack.pop(
                    node.externalFunctionParams, node.jjtGetNumChildren()),
                    node.context);
            stack.push(returnVal);
        } catch (TransformLangExecutorRuntimeException ex) {
            // annotate with the call site before re-throwing
            ex.setNode(node);
            throw ex;
        } catch (Exception ex) {
            String msg = "Java exception [" + ex.getClass().getName()
                    + "] occured during call of external function: "
                    + node.externalFunction.getLibrary() + "." + node.externalFunction.getName();
            logger.debug(msg, ex);
            throw new TransformLangExecutorRuntimeException(node, msg, ex);
        }
    } else {
        // INTERNAL FUNCTION
        // put call parameters on stack
        node.childrenAccept(this, data);
        CLVFFunctionDeclaration executionNode = node.callNode;
        // open call frame
        stack.pushFuncCallFrame();
        // store call parameters from stack as local variables; arguments were
        // pushed left-to-right, so they are popped in reverse parameter order
        for (int i = executionNode.numParams - 1; i >= 0; i--) {
            stack.storeLocalVar(i, new TLVariable("local", stack.pop()));
        }
        // execute function body statement by statement
        TLValue returnData;
        final int numChildren = executionNode.jjtGetNumChildren();
        for (int i = 0; i < numChildren; i++) {
            executionNode.jjtGetChild(i).jjtAccept(this, data);
            returnData = stack.pop(); // in case there is anything on top of stack
            // check for break or continue statements
            if (breakFlag) {
                breakFlag = false;
                if (breakType == BREAK_RETURN) {
                    // propagate the function's return value into the caller's frame
                    if (returnData != null)
                        stack.push(returnData);
                    break;
                }
            }
        }
        stack.popFuncCallFrame();
    }
    return data;
}
/**
 * A function declaration is executed only when it is called; visiting the
 * declaration node itself is intentionally a no-op.
 */
public Object visit(CLVFFunctionDeclaration node, Object data) {
return data;
}
/**
 * Evaluates the single child expression of an expression-statement; any value
 * it produces is left on the stack for the enclosing construct to consume.
 */
public Object visit(CLVFStatementExpression node, Object data) {
node.jjtGetChild(0).jjtAccept(this, data);
return data;
}
/**
 * Directly executes an already-parsed CTL function declaration with the given
 * argument values. A call frame is opened, the arguments are stored as local
 * variables under their declared names, and the body is interpreted until a
 * return (BREAK_RETURN) is encountered.
 *
 * Fix: passing fewer values than the function declares parameters previously
 * crashed with a bare ArrayIndexOutOfBoundsException; it now fails fast with a
 * descriptive runtime error.
 *
 * @param executionNode the parsed function declaration to run
 * @param data argument values; {@code null} is treated as "no arguments"
 * @return the (unchanged) {@code data} array
 */
public Object executeFunction(CLVFFunctionDeclaration executionNode, TLValue[] data) {
    // normalize: a null argument array means "no parameters"
    if (data == null) {
        data = new TLValue[0];
    }
    //TODO - check for function call parameter types
    // guard against too few arguments before indexing into the array
    if (data.length < executionNode.numParams) {
        throw new TransformLangExecutorRuntimeException("function requires "
                + executionNode.numParams + " parameters, but only " + data.length
                + " were supplied");
    }
    // open call frame
    stack.pushFuncCallFrame();
    // store call parameters as local variables under their declared names
    for (int i = executionNode.numParams - 1; i >= 0; i--) {
        stack.storeLocalVar(i, new TLVariable(executionNode.varNames[i], data[i]));
    }
    // execute function body statement by statement
    TLValue returnData;
    final int numChildren = executionNode.jjtGetNumChildren();
    for (int i = 0; i < numChildren; i++) {
        executionNode.jjtGetChild(i).jjtAccept(this, data);
        returnData = stack.pop(); // in case there is anything on top of stack
        // check for break or continue statements
        if (breakFlag) {
            breakFlag = false;
            if (breakType == BREAK_RETURN) {
                if (returnData != null)
                    stack.push(returnData);
                break;
            }
        }
    }
    stack.popFuncCallFrame();
    return data;
}
/**
 * Implements the CTL {@code raise_error()} statement: evaluates the message
 * expression and aborts execution by throwing a runtime exception carrying the
 * user-supplied text (or "no message" if the expression yielded null).
 */
public Object visit(CLVFRaiseErrorNode node, Object data) {
    // evaluate the message expression; its value ends up on the stack
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue userMessage = stack.pop();
    String text;
    if (userMessage != null) {
        text = userMessage.toString();
    } else {
        text = "no message";
    }
    throw new TransformLangExecutorRuntimeException(node, null,
            "!!! Exception raised by user: " + text);
}
/**
 * Implements CTL {@code eval()}: the child expression is evaluated to a source
 * string, which is then re-parsed with the enclosing TransformLangParser
 * (either as an expression or as a full statement list, depending on
 * {@code node.expMode}) and the resulting subtree is executed immediately.
 * Note: the subtree is parsed on every invocation - it is not cached.
 */
public Object visit(CLVFEvalNode node, Object data) {
// get TL expression
node.jjtGetChild(0).jjtAccept(this, data);
String src=stack.pop().toString();
Node parseTree;
// construct parser
try{
((TransformLangParser)parser).ReInit(new CharSequenceReader(src));
if (node.expMode)
parseTree = ((TransformLangParser)parser).StartExpression();
else
parseTree = ((TransformLangParser)parser).Start();
}catch(ParseException ex){
throw new TransformLangExecutorRuntimeException(node,
"Can't parse \"eval\" expression:"+ex.getMessage());
}catch(NullPointerException ex){
throw new RuntimeException("Error in \"eval\" execution/parsing (parser is missing)." ,ex);
}
/*
 * option to permanently store parsed expression in this tree
if (true){
//add this subtree to eclosing AST
}
*/
// execute eval
if (node.expMode)
visit((CLVFStartExpression)parseTree,data);
else
visit((CLVFStart)parseTree,data);
return data;
}
/**
 * Evaluates a sequence access (reset / current / next). The sequence object is
 * resolved lazily from the graph by name and initialized on first use; the
 * node also lazily allocates a single reusable TLValue of the requested return
 * type (long, string or int) which is updated and pushed on the stack.
 * Note: because {@code node.value} is reused, the pushed value is overwritten
 * by the next visit of the same node.
 */
public Object visit(CLVFSequenceNode node,Object data){
if (node.sequence==null){
if (graph!=null){
node.sequence=graph.getSequence(node.sequenceName);
}else{
throw new TransformLangExecutorRuntimeException(node,
"Can't obtain Sequence \""+node.sequenceName+
"\" from graph - graph is not assigned");
}
if (node.sequence==null){
throw new TransformLangExecutorRuntimeException(node,
"Can't obtain Sequence \""+node.sequenceName+
"\" from graph \""+graph.getName()+"\"");
}
// initialize the sequence if necessary
if (!node.sequence.isInitialized()) {
try {
node.sequence.init();
} catch (ComponentNotReadyException e) {
throw new TransformLangExecutorRuntimeException(
node,"Unable to initialize sequence "
+ "\"" + node.sequenceName + "\"",e);
}
}
}
// lazily allocate the reusable result holder of the proper type
if (node.value==null){
switch(node.retType){
case LONG_VAR:
node.value=TLValue.create(TLValueType.LONG);
break;
case STRING_VAR:
node.value=TLValue.create(TLValueType.STRING);
break;
default:
node.value=TLValue.create(TLValueType.INTEGER);
}
}
TLValue retVal=node.value;
switch(node.opType){
case CLVFSequenceNode.OP_RESET:
//		try{
node.sequence.resetValue();
//		}catch(ComponentNotReadyException ex){
//			throw new TransformLangExecutorRuntimeException(node,"Error when resetting sequence \""+node.sequenceName+"\"",ex);
//		}
// reset yields the constant zero, not the reusable holder
retVal=TLNumericValue.ZERO;
break;
case CLVFSequenceNode.OP_CURRENT:
switch(node.retType){
case LONG_VAR:
((TLNumericValue)retVal).setValue(node.sequence.currentValueLong());
break;
case STRING_VAR:
retVal.setValue(node.sequence.currentValueString());
break;
default:
((TLNumericValue)retVal).setValue(node.sequence.currentValueInt());
}
break;
default: // default is next value from sequence
switch(node.retType){
case LONG_VAR:
((TLNumericValue)retVal).setValue(node.sequence.nextValueLong());
break;
case STRING_VAR:
retVal.setValue(node.sequence.nextValueString());
break;
default:
((TLNumericValue)retVal).setValue(node.sequence.nextValueInt());
}
}
stack.push(retVal);
return data;
}
/**
 * Evaluates a lookup-table access. The table is resolved lazily from the graph
 * by name (and initialized if needed); per-table Lookup instances are cached
 * in the {@code lookups} map and shared between nodes referring to the same
 * table. Supported operations: init/free (now no-ops, kept for compatibility),
 * num_found, get (seek by key, push first matching field value) and next
 * (push the matching field of the next record). A missing record pushes the
 * TL null value.
 */
public Object visit(CLVFLookupNode node, Object data) {
DataRecord record = null;
if (node.lookupTable == null) {
node.lookupTable = graph.getLookupTable(node.lookupName);
if (node.lookupTable == null) {
throw new TransformLangExecutorRuntimeException(node,
"Can't obtain LookupTable \"" + node.lookupName
+ "\" from graph \"" + graph.getName() + "\"");
}
else {
// we have to initialize the lookup table ourselves, graph is not doing it for us
try {
if (! node.lookupTable.isInitialized()) {
node.lookupTable.init();
}
} catch (ComponentNotReadyException e) {
throw new TransformLangExecutorRuntimeException(node,
"Error when initializing lookup table \""
+ node.lookupName + "\" :", e);
}
}
// reuse a Lookup previously created for the same table by another node
if (node.lookup == null && lookups.containsKey(node.lookupTable.getId())) {
node.lookup = lookups.get(node.lookupTable.getId());
}
// resolve the referenced field name to its position once
if (node.opType == CLVFLookupNode.OP_GET || node.opType==CLVFLookupNode.OP_NEXT) {
DataRecordMetadata metadata = node.lookupTable.getMetadata();
if (metadata != null) {
node.fieldNum = metadata.getFieldPosition(
node.fieldName);
if (node.fieldNum < 0) {
throw new TransformLangExecutorRuntimeException(node,
"Invalid field name \"" + node.fieldName
+ "\" at LookupTable \"" + node.lookupName
+ "\" in graph \"" + graph.getName() + "\"");
}
}
}
}
switch (node.opType) {
case CLVFLookupNode.OP_INIT:
// The code is removed from CTL1 after discuession with Kokon, in CTL2 these functions do not exist anymore as they are not needed
/*try {
node.lookupTable.init();
node.lookupTable.preExecute();
} catch (ComponentNotReadyException ex) {
throw new TransformLangExecutorRuntimeException(node,
"Error when initializing lookup table \""
+ node.lookupName + "\" :", ex);
}*/
return data;
case CLVFLookupNode.OP_FREE:
// The code is removed from CTL1 after discuession with Kokon, in CTL2 these functions do not exist anymore as they are not needed
/*node.lookupTable.free();
node.lookup = null;
lookups.remove(node.lookupTable.getId());*/
return data;
case CLVFLookupNode.OP_NUM_FOUND:
// NOTE(review): assumes a preceding OP_GET created node.lookup - verify callers
stack.push(new TLNumericValue(TLValueType.INTEGER, new CloverInteger(
node.lookup.getNumFound())));
return data;
case CLVFLookupNode.OP_GET:
// evaluate key expressions onto the stack, then seek
node.childrenAccept(this, data);
if (node.lookup == null) {
try {
node.createLookup(stack);
} catch (ComponentNotReadyException ex) {
throw new TransformLangExecutorRuntimeException(node,
"Error when initializing lookup table \""
+ node.lookupName + "\" :", ex);
}
lookups.put(node.lookupTable.getId(), node.lookup);
}
node.seek(stack);
if (node.fieldNum == -1) {
node.fieldNum = node.lookupTable.getMetadata().getFieldPosition(node.fieldName);
}
if (node.lookup.hasNext()) {
record = node.lookup.next();
}else{
record = null;
}
break;
default: // CLVFLookupNode.OP_NEXT:
if (node.lookup.hasNext()) {
record = node.lookup.next();
}else{
record = null;
}
}
// push the requested field of the found record, or TL null if no match
if (record != null) {
stack.push(TLValue.convertValue(record.getField(node.fieldNum)));
} else {
stack.push(TLNullValue.getInstance());
}
return data;
}
/**
 * Executes a dictionary access (read / write / delete) against the graph's
 * dictionary. Only string keys are supported; values must be strings or the
 * TL null value. A read pushes the found value (or TL null) on the stack.
 *
 * Fix: the delete-failure path threw a new exception without chaining the
 * caught {@code ComponentNotReadyException}, losing the root cause (the write
 * path already chained it); the cause is now preserved.
 */
public Object visit(CLVFDictionaryNode node, Object data) {
    final Dictionary d = graph.getDictionary();
    if (d == null) {
        throw new TransformLangExecutorRuntimeException("No dictionary defined on the graph");
    }
    TLValue key = null;
    TLValue value = null;
    switch (node.operation) {
    case CLVFDictionaryNode.OP_READ:
        // evaluate the key
        node.jjtGetChild(0).jjtAccept(this, data);
        key = stack.pop();
        if (key.getType() != TLValueType.STRING) {
            throw new TransformLangExecutorRuntimeException("Dictionary supports only non-null string keys");
        }
        final Object dictValue = d.getValue(((StringBuilder) key.getValue()).toString());
        // a missing entry is represented by the TL null value
        stack.push(dictValue == null ? TLNullValue.getInstance() : new TLStringValue(dictValue.toString()));
        break;
    case CLVFDictionaryNode.OP_WRITE:
        node.jjtGetChild(0).jjtAccept(this, data);
        key = stack.pop();
        if (key.getType() != TLValueType.STRING) {
            throw new TransformLangExecutorRuntimeException("Dictionary supports only string keys");
        }
        final String keyToWrite = ((StringBuilder) key.getValue()).toString();
        node.jjtGetChild(1).jjtAccept(this, data);
        value = stack.pop();
        String valueToWrite = null;
        if (value == TLNullValue.getInstance()) {
            // writing null value
            valueToWrite = null;
        } else if (value.getType() == TLValueType.STRING) {
            // convert string value to string
            valueToWrite = ((StringBuilder) value.getValue()).toString();
        } else {
            // anything non-null, non-string is error
            throw new TransformLangExecutorRuntimeException("Dictionary supports only string values");
        }
        try {
            d.setValue(keyToWrite, StringDictionaryType.TYPE_ID, valueToWrite);
        } catch (ComponentNotReadyException e) {
            throw new TransformLangExecutorRuntimeException("Cannot set dictionary key '" + keyToWrite
                    + "' to value '" + valueToWrite + "'", e);
        }
        break;
    case CLVFDictionaryNode.OP_DELETE:
        // evaluate the key
        node.jjtGetChild(0).jjtAccept(this, data);
        key = stack.pop();
        if (key.getType() != TLValueType.STRING) {
            throw new TransformLangExecutorRuntimeException("Dictionary supports only non-null string keys");
        }
        final String keyToDelete = ((StringBuilder) key.getValue()).toString();
        try {
            // deleting = setting the key's value to null
            d.setValue(keyToDelete, null);
        } catch (ComponentNotReadyException e) {
            // preserve the cause so the root failure is not lost
            throw new TransformLangExecutorRuntimeException("Cannot delete key '" + keyToDelete + "'", e);
        }
        break;
    default:
        throw new TransformLangExecutorRuntimeException("Unknown dictionary operation: " + node.operation);
    }
    return data;
}
/**
 * Implements the CTL print_log statement: evaluates the message expression and
 * emits it to the runtime logger at the level encoded in the node
 * (1=debug, 2=info, 3=warn, 4=error, 5=fatal, anything else=trace).
 * Fails if no runtime logger has been configured.
 */
public Object visit(CLVFPrintLogNode node, Object data) {
    if (runtimeLogger == null) {
        throw new TransformLangExecutorRuntimeException(node,
                "Can NOT perform logging operation - no logger defined");
    }
    node.jjtGetChild(0).jjtAccept(this, data);
    TLValue msg = stack.pop();
    final int level = node.level;
    if (level == 1) {          // "debug"
        runtimeLogger.debug(msg);
    } else if (level == 2) {   // "info"
        runtimeLogger.info(msg);
    } else if (level == 3) {   // "warn"
        runtimeLogger.warn(msg);
    } else if (level == 4) {   // "error"
        runtimeLogger.error(msg);
    } else if (level == 5) {   // "fatal"
        runtimeLogger.fatal(msg);
    } else {
        runtimeLogger.trace(msg);
    }
    return data;
}
/**
 * Executes an imported source file by simply visiting all of its child
 * statements in order.
 */
public Object visit(CLVFImportSource node,Object data) {
node.childrenAccept(this, data);
return data;
}
/**
 * Pushes the constant value associated with a symbolic name (resolved at parse
 * time into {@code node.typeValue}) onto the stack.
 */
public Object visit(CLVFSymbolNameExp node,Object data) {
stack.push(node.typeValue);
return data;
}
/**
 * Operator nodes carry no behavior of their own - their {@code kind} is read
 * directly by the parent expression visitors - so this visit is a no-op.
 */
public Object visit(CLVFOperator node,Object data) {
return data;
}
/**
 * Evaluates a postfix ++/-- expression. The operand must be a variable; its
 * original value is duplicated and pushed (postfix semantics: the expression
 * yields the value before the update), then the variable itself is mutated
 * in place. Numeric variables change by one; DATE variables by one day.
 */
public Object visit(CLVFPostfixExpression node,Object data) {
// get variable && put value on stack by executing child node
Node child=node.jjtGetChild(0);
if (! (child instanceof CLVFVariableLiteral)){
throw new TransformLangExecutorRuntimeException(node,"postfix expression is allowed only on variable");
}
TLVariable var=(TLVariable)child.jjtAccept(this, data);
int operatorType=((CLVFOperator)node.jjtGetChild(1)).kind;
// value instance on stack is variable's internal value
// duplicate it before incrementing
TLValue origValue = stack.pop();
stack.push(origValue.duplicate());
if (operatorType==INCR) {
if (var.getType().isNumeric()) {
((TLNumericValue)var.getTLValue()).add(Stack.NUM_ONE_P);
}else if (var.getType()==TLValueType.DATE) {
// shared calendar on the stack is used as scratch space for date math
stack.calendar.setTime(((TLDateValue)var.getTLValue()).getDate());
stack.calendar.add(Calendar.DATE, 1);
((TLDateValue)var.getTLValue()).setValue(stack.calendar.getTime());
}else {
throw new TransformLangExecutorRuntimeException(node,"variable ["+var+"] is not of numeric or date type");
}
}else{
if (var.getType().isNumeric()) {
((TLNumericValue)var.getTLValue()).sub(Stack.NUM_ONE_P);
}else if (var.getType()==TLValueType.DATE) {
stack.calendar.setTime(((TLDateValue)var.getTLValue()).getDate());
stack.calendar.add(Calendar.DATE, -1);
var.getTLValue().setValue(stack.calendar.getTime());
}else {
throw new TransformLangExecutorRuntimeException(node,"variable ["+var+"] is not of numeric or date type");
}
}
return data;
}
/**
 * Evaluates a prefix unary expression: ++/-- (variables only; the updated
 * value is pushed), logical NOT on booleans, unary minus (negation) and unary
 * plus (absolute value) on numerics. The operator is child 0, the operand
 * child 1; the result ends up on the stack.
 *
 * Fix: the "only on variable" diagnostic was copy-pasted from the postfix
 * visitor and wrongly said "postfix"; it now says "prefix".
 */
public Object visit(CLVFUnaryExpression node, Object data) {
    int operatorType = ((CLVFOperator) node.jjtGetChild(0)).kind;
    Node child = node.jjtGetChild(1);
    TLValue val;
    switch (operatorType) {
    case INCR:
    case DECR:
        // get variable && put value on stack by executing child node
        if (!(child instanceof CLVFVariableLiteral)) {
            throw new TransformLangExecutorRuntimeException(node,
                    "prefix expression is allowed only on variable");
        }
        TLVariable var = (TLVariable) child.jjtAccept(this, data);
        if (var.getType().isNumeric()) {
            ((TLNumericValue) var.getTLValue()).add(
                    operatorType == INCR ? Stack.NUM_ONE_P : Stack.NUM_MINUS_ONE_P);
        } else if (var.getType() == TLValueType.DATE) {
            // DATE variables are shifted by one day using the shared scratch calendar
            stack.calendar.setTime(((TLDateValue) var.getTLValue()).getDate());
            stack.calendar.add(Calendar.DATE, operatorType == INCR ? 1 : -1);
            var.getTLValue().setValue(stack.calendar.getTime());
        } else {
            throw new TransformLangExecutorRuntimeException(node,
                    "variable [" + var + "] is not of numeric or date type");
        }
        // re-evaluate the variable so the updated value is what ends up on the stack
        child.jjtAccept(this, data);
        break;
    case NOT:
        child.jjtAccept(this, data);
        val = stack.pop();
        if (val.type == TLValueType.BOOLEAN) {
            stack.push(val == TLBooleanValue.TRUE ? TLBooleanValue.FALSE : TLBooleanValue.TRUE);
        } else {
            throw new TransformLangExecutorRuntimeException(node, new Object[] { val },
                    "logical condition does not evaluate to BOOLEAN value");
        }
        break;
    case MINUS:
        child.jjtAccept(this, data);
        val = stack.pop();
        if (val.type.isNumeric()) {
            // duplicate before mutating - the popped value may be shared
            val = val.duplicate();
            ((TLNumericValue) val).neg();
            stack.push(val);
        } else {
            throw new TransformLangExecutorRuntimeException(node, new Object[] { val },
                    "variable is not of numeric type");
        }
        break;
    case PLUS:
        child.jjtAccept(this, data);
        val = stack.pop();
        if (val.type.isNumeric()) {
            val = val.duplicate();
            ((TLNumericValue) val).abs();
            stack.push(val);
        } else {
            throw new TransformLangExecutorRuntimeException(node, new Object[] { val },
                    "variable is not of numeric type");
        }
        break;
    default:
        throw new TransformLangExecutorRuntimeException(node, "unsupported operation");
    }
    return data;
}
/**
 * Pushes the pre-built list value of a literal list (materialized at parse
 * time into {@code node.value}) onto the stack.
 */
public Object visit(CLVFListOfLiterals node, Object data) {
stack.push(node.value);
return data;
}
}
| FIX: Issue 3298 Decimal defaults to 0.0 now in CTL1 (was NaN)
git-svn-id: 7003860f782148507aa0d02fa3b12992383fb6a5@9006 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
| cloveretl.engine/src/org/jetel/interpreter/TransformLangExecutor.java | FIX: Issue 3298 Decimal defaults to 0.0 now in CTL1 (was NaN) |
|
Java | lgpl-2.1 | 74ba7cb50541e5aa22e9b808a3cb32529b7a43b0 | 0 | deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3,deegree/deegree3 | //$HeadURL$
/*----------------------------------------------------------------------------
This file is part of deegree, http://deegree.org/
Copyright (C) 2001-2009 by:
Department of Geography, University of Bonn
and
lat/lon GmbH
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free
Software Foundation; either version 2.1 of the License, or (at your option)
any later version.
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Contact information:
lat/lon GmbH
Aennchenstr. 19, 53177 Bonn
Germany
http://lat-lon.de/
Department of Geography, University of Bonn
Prof. Dr. Klaus Greve
Postfach 1147, 53001 Bonn
Germany
http://www.geographie.uni-bonn.de/deegree/
e-mail: [email protected]
----------------------------------------------------------------------------*/
package org.deegree.rendering.r2d.se.parser;
import static java.awt.Font.TRUETYPE_FONT;
import static java.awt.Font.TYPE1_FONT;
import static java.awt.Font.createFont;
import static java.lang.Double.NEGATIVE_INFINITY;
import static java.lang.Double.POSITIVE_INFINITY;
import static java.lang.Double.parseDouble;
import static java.lang.Float.parseFloat;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static javax.xml.stream.XMLStreamConstants.END_ELEMENT;
import static javax.xml.stream.XMLStreamConstants.START_DOCUMENT;
import static javax.xml.stream.XMLStreamConstants.START_ELEMENT;
import static org.deegree.commons.utils.ArrayUtils.splitAsDoubles;
import static org.deegree.commons.utils.ColorUtils.decodeWithAlpha;
import static org.deegree.commons.xml.CommonNamespaces.SENS;
import static org.deegree.commons.xml.CommonNamespaces.XLNNS;
import static org.deegree.commons.xml.stax.StAXParsingHelper.getElementTextAsBoolean;
import static org.deegree.commons.xml.stax.StAXParsingHelper.getElementTextAsQName;
import static org.deegree.commons.xml.stax.StAXParsingHelper.resolve;
import static org.deegree.commons.xml.stax.StAXParsingHelper.skipElement;
import static org.deegree.filter.xml.Filter110XMLDecoder.parseExpression;
import static org.deegree.rendering.i18n.Messages.get;
import static org.deegree.rendering.r2d.se.unevaluated.Continuation.SBUPDATER;
import static org.deegree.rendering.r2d.styling.components.Stroke.LineCap.BUTT;
import static org.deegree.rendering.r2d.styling.components.Stroke.LineJoin.ROUND;
import static org.deegree.rendering.r2d.styling.components.UOM.Foot;
import static org.deegree.rendering.r2d.styling.components.UOM.Metre;
import static org.deegree.rendering.r2d.styling.components.UOM.Pixel;
import static org.deegree.rendering.r2d.styling.components.UOM.mm;
import static org.slf4j.LoggerFactory.getLogger;
import java.awt.Color;
import java.awt.FontFormatException;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.imageio.ImageIO;
import javax.xml.namespace.QName;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.Location;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import org.apache.xerces.impl.dv.util.Base64;
import org.deegree.commons.tom.TypedObjectNode;
import org.deegree.commons.utils.DoublePair;
import org.deegree.commons.utils.Pair;
import org.deegree.commons.utils.Triple;
import org.deegree.commons.xml.stax.StAXParsingHelper;
import org.deegree.feature.Feature;
import org.deegree.filter.Expression;
import org.deegree.filter.Filter;
import org.deegree.filter.FilterEvaluationException;
import org.deegree.filter.XPathEvaluator;
import org.deegree.filter.expression.custom.se.Categorize;
import org.deegree.filter.expression.custom.se.Interpolate;
import org.deegree.filter.xml.Filter110XMLDecoder;
import org.deegree.filter.xml.Filter110XMLEncoder;
import org.deegree.rendering.r2d.RenderHelper;
import org.deegree.rendering.r2d.se.unevaluated.Continuation;
import org.deegree.rendering.r2d.se.unevaluated.Symbolizer;
import org.deegree.rendering.r2d.se.unevaluated.Continuation.Updater;
import org.deegree.rendering.r2d.styling.LineStyling;
import org.deegree.rendering.r2d.styling.PointStyling;
import org.deegree.rendering.r2d.styling.PolygonStyling;
import org.deegree.rendering.r2d.styling.RasterChannelSelection;
import org.deegree.rendering.r2d.styling.RasterStyling;
import org.deegree.rendering.r2d.styling.TextStyling;
import org.deegree.rendering.r2d.styling.RasterStyling.ContrastEnhancement;
import org.deegree.rendering.r2d.styling.RasterStyling.Overlap;
import org.deegree.rendering.r2d.styling.RasterStyling.ShadedRelief;
import org.deegree.rendering.r2d.styling.components.Fill;
import org.deegree.rendering.r2d.styling.components.Font;
import org.deegree.rendering.r2d.styling.components.Graphic;
import org.deegree.rendering.r2d.styling.components.Halo;
import org.deegree.rendering.r2d.styling.components.LinePlacement;
import org.deegree.rendering.r2d.styling.components.Mark;
import org.deegree.rendering.r2d.styling.components.PerpendicularOffsetType;
import org.deegree.rendering.r2d.styling.components.Stroke;
import org.deegree.rendering.r2d.styling.components.UOM;
import org.deegree.rendering.r2d.styling.components.Font.Style;
import org.deegree.rendering.r2d.styling.components.Mark.SimpleMark;
import org.deegree.rendering.r2d.styling.components.PerpendicularOffsetType.Substraction;
import org.deegree.rendering.r2d.styling.components.PerpendicularOffsetType.Type;
import org.deegree.rendering.r2d.styling.components.Stroke.LineCap;
import org.deegree.rendering.r2d.styling.components.Stroke.LineJoin;
import org.slf4j.Logger;
/**
* <code>SymbologyParser</code> parses the SE part of 1.1.0 and the corresponding SLD 1.0.0 part.
*
* @author <a href="mailto:[email protected]">Andreas Schmitz</a>
* @author last edited by: $Author$
*
* @version $Revision$, $Date$
*/
public class SymbologyParser {
// When true, some XML source snippets are collected during parsing (usable for re-export).
private boolean collectXMLSnippets = false;
static final Logger LOG = getLogger( SymbologyParser.class );
/**
 * A static elsefilter instance (think of it as a marker).
 */
public static final ElseFilter ELSEFILTER = new ElseFilter();
/**
 * A default instance (does not collect XML snippets).
 */
public static final SymbologyParser INSTANCE = new SymbologyParser();
/**
 * Constructs a parser which does not collect source snippets.
 */
public SymbologyParser() {
// default values
}
/**
 * Constructs a parser with configurable snippet collection.
 *
 * @param collectXMLSnippets
 *            if true, some source snippets are collected (which can be used for re-export)
 */
public SymbologyParser( boolean collectXMLSnippets ) {
this.collectXMLSnippets = collectXMLSnippets;
}
/**
 * Checks whether the reader is currently positioned on the start element with the given local
 * name, logging an error (with line/column) if it is not.
 *
 * @param in
 *            the reader to check (must be positioned on an element event)
 * @param elementName
 *            the expected local name
 * @return true, if the reader is on the expected start element
 */
private static boolean require( XMLStreamReader in, String elementName ) {
    // getLocalName() is deliberately evaluated before isStartElement(), as in the original
    // contract (it requires the current event to be an element event)
    if ( in.getLocalName().equals( elementName ) && in.isStartElement() ) {
        return true;
    }
    Location loc = in.getLocation();
    LOG.error( "Expected a '{}' element at line {} column {}.",
               new Object[] { elementName, loc.getLineNumber(), loc.getColumnNumber() } );
    return false;
}
/**
 * Parses an <code>OnlineResource</code> element and resolves its href attribute against the
 * reader's system id.
 *
 * @param in
 *            reader positioned on the OnlineResource start element; positioned on the
 *            corresponding end element afterwards
 * @return the resolved href attribute, or null if the reader was not positioned on an
 *         OnlineResource element
 * @throws XMLStreamException
 * @throws MalformedURLException
 */
public static URL parseOnlineResource( XMLStreamReader in )
                        throws XMLStreamException, MalformedURLException {
    if ( !require( in, "OnlineResource" ) ) {
        return null;
    }
    String href = in.getAttributeValue( null, "href" );
    URL target = StAXParsingHelper.resolve( href, in );
    // consume the (empty) element completely
    in.nextTag();
    in.require( END_ELEMENT, null, "OnlineResource" );
    return target;
}
/**
 * Parses the elements common to all symbolizers (Name, Geometry, Description — or the SLD
 * 1.0.0 style Title/Abstract) if the reader is currently positioned on one of them, storing
 * the results in the given Common object. Elements that do not match are left untouched.
 *
 * @param common
 *            target object to fill (name, geometry expression, title, abstract, source location)
 * @param in
 *            the reader; only consumed if positioned on a matching element
 * @throws XMLStreamException
 */
private static void checkCommon( Common common, XMLStreamReader in )
                        throws XMLStreamException {
    if ( in.getLocalName().equals( "Name" ) ) {
        common.name = in.getElementText();
    }
    Location l = in.getLocation();
    if ( in.getLocalName().equals( "Geometry" ) ) {
        // remember where the geometry expression was defined (used for error reporting later)
        common.loc = l.getSystemId();
        common.line = l.getLineNumber();
        common.col = l.getColumnNumber();
        in.nextTag();
        common.geometry = parseExpression( in );
        in.nextTag();
        in.require( END_ELEMENT, null, "Geometry" );
    }
    if ( in.getLocalName().equals( "Description" ) ) {
        while ( !( in.isEndElement() && in.getLocalName().equals( "Description" ) ) ) {
            in.nextTag();
            if ( in.getLocalName().equals( "Title" ) ) {
                common.title = in.getElementText();
            } else if ( in.getLocalName().equals( "Abstract" ) ) {
                common.abstract_ = in.getElementText();
            } else if ( in.isStartElement() ) {
                // query the reader's current location here: the Location captured before the
                // loop would report a stale line/column for the unknown element
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
        }
    }
    // in case of SLD 1.0.0 (Title/Abstract appear directly, without a Description container):
    if ( in.getLocalName().equals( "Title" ) ) {
        common.title = in.getElementText();
        in.nextTag();
    }
    if ( in.getLocalName().equals( "Abstract" ) ) {
        common.abstract_ = in.getElementText();
        in.nextTag();
    }
}
/**
 * Parses an SE/SLD <code>Fill</code> element, including a nested <code>GraphicFill</code> and
 * the "fill"/"fill-opacity" parameters.
 *
 * @param in
 *            reader positioned on the Fill start element; positioned on the corresponding end
 *            element afterwards
 * @return a pair of the (statically evaluated) Fill and a continuation for dynamic parts, the
 *         continuation being null if everything was static
 * @throws XMLStreamException
 */
private Pair<Fill, Continuation<Fill>> parseFill( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Fill" );

    Fill base = new Fill();
    Continuation<Fill> contn = null;

    while ( !( in.isEndElement() && in.getLocalName().equals( "Fill" ) ) ) {
        in.nextTag();

        if ( in.getLocalName().equals( "GraphicFill" ) ) {
            in.nextTag();
            final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );
            if ( pair != null ) {
                base.graphic = pair.first;
                if ( pair.second != null ) {
                    // delay evaluation of the dynamic graphic parts until feature evaluation
                    contn = new Continuation<Fill>( contn ) {
                        @Override
                        public void updateStep( Fill base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            pair.second.evaluate( base.graphic, f, evaluator );
                        }
                    };
                }
            }
            in.nextTag();
        } else if ( in.getLocalName().endsWith( "Parameter" ) ) {
            // matches both SE 'SvgParameter' and SLD 'CssParameter'; the name attribute may be
            // missing, so compare literal-first to avoid a NullPointerException
            String cssName = in.getAttributeValue( null, "name" );
            if ( "fill".equals( cssName ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Fill>() {
                    @Override
                    public void update( Fill obj, String val ) {
                        // keep alpha value
                        int alpha = obj.color.getAlpha();
                        obj.color = decodeWithAlpha( val );
                        obj.color = new Color( obj.color.getRed(), obj.color.getGreen(), obj.color.getBlue(), alpha );
                    }
                }, contn ).second;
            }
            if ( "fill-opacity".equals( cssName ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Fill>() {
                    @Override
                    public void update( Fill obj, String val ) {
                        // keep original color, clamp opacity to [0, 1]
                        float alpha = max( 0, min( 1, parseFloat( val ) ) );
                        float[] cols = obj.color.getRGBColorComponents( null );
                        obj.color = new Color( cols[0], cols[1], cols[2], alpha );
                    }
                }, contn ).second;
            }
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }

    in.require( END_ELEMENT, null, "Fill" );
    return new Pair<Fill, Continuation<Fill>>( base, contn );
}
/**
 * Parses an SE/SLD <code>Stroke</code> element, including the stroke-* parameters and nested
 * <code>GraphicFill</code>/<code>GraphicStroke</code> elements.
 *
 * @param in
 *            reader positioned on the Stroke start element; positioned on the corresponding
 *            end element afterwards
 * @return a pair of the (statically evaluated) Stroke and a continuation for dynamic parts,
 *         the continuation being null if everything was static
 * @throws XMLStreamException
 */
private Pair<Stroke, Continuation<Stroke>> parseStroke( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Stroke" );

    Stroke base = new Stroke();
    Continuation<Stroke> contn = null;

    while ( !( in.isEndElement() && in.getLocalName().equals( "Stroke" ) ) ) {
        in.nextTag();
        if ( in.getLocalName().endsWith( "Parameter" ) ) {
            // matches both SE 'SvgParameter' and SLD 'CssParameter'; compare literal-first so a
            // missing name attribute (null) falls into the unknown-parameter branch instead of
            // throwing a NullPointerException
            String name = in.getAttributeValue( null, "name" );
            if ( "stroke".equals( name ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
                    @Override
                    public void update( Stroke obj, String val ) {
                        // keep alpha value
                        int alpha = obj.color.getAlpha();
                        obj.color = decodeWithAlpha( val );
                        obj.color = new Color( obj.color.getRed(), obj.color.getGreen(), obj.color.getBlue(), alpha );
                    }
                }, contn ).second;
            } else if ( "stroke-opacity".equals( name ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
                    @Override
                    public void update( Stroke obj, String val ) {
                        // keep original color, clamp opacity to [0, 1] (consistent with
                        // fill-opacity; the Color float constructor rejects values outside the
                        // range with an IllegalArgumentException)
                        float alpha = max( 0, min( 1, parseFloat( val ) ) );
                        float[] cols = obj.color.getRGBColorComponents( null );
                        obj.color = new Color( cols[0], cols[1], cols[2], alpha );
                    }
                }, contn ).second;
            } else if ( "stroke-width".equals( name ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
                    @Override
                    public void update( Stroke obj, String val ) {
                        obj.width = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( "stroke-linejoin".equals( name ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
                    @Override
                    public void update( Stroke obj, String val ) {
                        try {
                            obj.linejoin = LineJoin.valueOf( val.toUpperCase() );
                        } catch ( IllegalArgumentException e ) {
                            LOG.warn( "Used invalid value '{}' for line join.", val );
                            obj.linejoin = ROUND;
                        }
                    }
                }, contn ).second;
            } else if ( "stroke-linecap".equals( name ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
                    @Override
                    public void update( Stroke obj, String val ) {
                        try {
                            obj.linecap = LineCap.valueOf( val.toUpperCase() );
                        } catch ( IllegalArgumentException e ) {
                            LOG.warn( "Used invalid value '{}' for line cap.", val );
                            obj.linecap = BUTT;
                        }
                    }
                }, contn ).second;
            } else if ( "stroke-dasharray".equals( name ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
                    @Override
                    public void update( Stroke obj, String val ) {
                        // , is not strictly allowed, but we don't lose anything by being flexible
                        if ( val.contains( "," ) ) {
                            obj.dasharray = splitAsDoubles( val, "," );
                        } else {
                            obj.dasharray = splitAsDoubles( val, "\\s" );
                        }
                    }
                }, contn ).second;
            } else if ( "stroke-dashoffset".equals( name ) ) {
                contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
                    @Override
                    public void update( Stroke obj, String val ) {
                        obj.dashoffset = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else {
                Location loc = in.getLocation();
                LOG.error( "Found unknown parameter '{}' at line {}, column {}, skipping.",
                           new Object[] { name, loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
            in.require( END_ELEMENT, null, null );
        } else if ( in.getLocalName().equals( "GraphicFill" ) ) {
            in.nextTag();
            final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );
            if ( pair != null ) {
                base.fill = pair.first;
                if ( pair.second != null ) {
                    // delay evaluation of the dynamic graphic parts until feature evaluation
                    contn = new Continuation<Stroke>( contn ) {
                        @Override
                        public void updateStep( Stroke base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            pair.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            }
            in.require( END_ELEMENT, null, "Graphic" );
            in.nextTag();
            in.require( END_ELEMENT, null, "GraphicFill" );
        } else if ( in.getLocalName().equals( "GraphicStroke" ) ) {
            while ( !( in.isEndElement() && in.getLocalName().equals( "GraphicStroke" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "Graphic" ) ) {
                    final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );
                    if ( pair != null ) {
                        base.stroke = pair.first;
                        if ( pair.second != null ) {
                            contn = new Continuation<Stroke>( contn ) {
                                @Override
                                public void updateStep( Stroke base, Feature f, XPathEvaluator<Feature> evaluator ) {
                                    pair.second.evaluate( base.stroke, f, evaluator );
                                }
                            };
                        }
                    }
                    in.require( END_ELEMENT, null, "Graphic" );
                } else if ( in.getLocalName().equals( "InitialGap" ) ) {
                    contn = updateOrContinue( in, "InitialGap", base, new Updater<Stroke>() {
                        @Override
                        public void update( Stroke obj, String val ) {
                            obj.strokeInitialGap = Double.parseDouble( val );
                        }
                    }, contn ).second;
                    in.require( END_ELEMENT, null, "InitialGap" );
                } else if ( in.getLocalName().equals( "Gap" ) ) {
                    contn = updateOrContinue( in, "Gap", base, new Updater<Stroke>() {
                        @Override
                        public void update( Stroke obj, String val ) {
                            obj.strokeGap = Double.parseDouble( val );
                        }
                    }, contn ).second;
                    in.require( END_ELEMENT, null, "Gap" );
                } else if ( in.getLocalName().equals( "PositionPercentage" ) ) {
                    contn = updateOrContinue( in, "PositionPercentage", base, new Updater<Stroke>() {
                        @Override
                        public void update( Stroke obj, String val ) {
                            obj.positionPercentage = Double.parseDouble( val );
                        }
                    }, contn ).second;
                    in.require( END_ELEMENT, null, "PositionPercentage" );
                } else if ( in.isStartElement() ) {
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
            }
        } else if ( in.isStartElement() ) {
            LOG.error( "Found unknown element '{}', skipping.", in.getLocalName() );
            skipElement( in );
        }
    }

    in.require( END_ELEMENT, null, "Stroke" );
    return new Pair<Stroke, Continuation<Stroke>>( base, contn );
}
/**
 * Parses an SE/SLD <code>Mark</code> element: either a WellKnownName, or an external mark
 * loaded from an OnlineResource/InlineContent (TTF/Type1 font glyph or SVG shape), plus
 * optional Fill and Stroke.
 *
 * @param in
 *            reader positioned on the Mark start element; positioned on the corresponding end
 *            element afterwards
 * @return a pair of the (statically evaluated) Mark and a continuation for dynamic fill/stroke
 *         parts (null if everything was static)
 * @throws XMLStreamException
 */
private Pair<Mark, Continuation<Mark>> parseMark( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Mark" );
    Mark base = new Mark();
    Continuation<Mark> contn = null;
    in.nextTag();
    while ( !( in.isEndElement() && in.getLocalName().equals( "Mark" ) ) ) {
        // skip over end elements of already-handled children
        if ( in.isEndElement() ) {
            in.nextTag();
        }
        if ( in.getLocalName().equals( "WellKnownName" ) ) {
            String wkn = in.getElementText();
            try {
                base.wellKnown = SimpleMark.valueOf( wkn.toUpperCase() );
            } catch ( IllegalArgumentException e ) {
                // fall back to a square for unsupported names
                LOG.warn( "Specified unsupported WellKnownName of '{}', using square instead.", wkn );
                base.wellKnown = SimpleMark.SQUARE;
            }
        } else
            // labeled block: 'break sym' aborts the external-mark handling on any failure
            sym: if ( in.getLocalName().equals( "OnlineResource" ) || in.getLocalName().equals( "InlineContent" ) ) {
                LOG.debug( "Loading mark from external file." );
                Triple<InputStream, String, Continuation<StringBuffer>> pair = getOnlineResourceOrInlineContent( in );
                if ( pair == null ) {
                    in.nextTag();
                    break sym;
                }
                InputStream is = pair.first;
                in.nextTag();
                in.require( START_ELEMENT, null, "Format" );
                String format = in.getElementText();
                in.require( END_ELEMENT, null, "Format" );
                in.nextTag();
                if ( in.getLocalName().equals( "MarkIndex" ) ) {
                    base.markIndex = Integer.parseInt( in.getElementText() );
                }
                if ( is != null ) {
                    // NOTE(review): 'is' is never closed on this path — possible resource
                    // leak, confirm whether createFont/getShapeFromSvg close it
                    try {
                        java.awt.Font font = null;
                        if ( format.equalsIgnoreCase( "ttf" ) ) {
                            font = createFont( TRUETYPE_FONT, is );
                        }
                        if ( format.equalsIgnoreCase( "type1" ) ) {
                            font = createFont( TYPE1_FONT, is );
                        }
                        if ( format.equalsIgnoreCase( "svg" ) ) {
                            base.shape = RenderHelper.getShapeFromSvg( is, pair.second );
                        }
                        if ( font == null && base.shape == null ) {
                            LOG.warn( "Mark was not loaded, because the format '{}' is not supported.", format );
                            break sym;
                        }
                        // NOTE(review): this also rejects markIndex == getNumGlyphs() - 1,
                        // which looks like a valid glyph index — possible off-by-one, confirm
                        if ( font != null && base.markIndex >= font.getNumGlyphs() - 1 ) {
                            LOG.warn( "The font only contains {} glyphs, but the index given was {}.",
                                      font.getNumGlyphs(), base.markIndex );
                            break sym;
                        }
                        base.font = font;
                    } catch ( FontFormatException e ) {
                        LOG.debug( "Stack trace:", e );
                        LOG.warn( "The file was not a valid '{}' file: '{}'", format, e.getLocalizedMessage() );
                    } catch ( IOException e ) {
                        LOG.debug( "Stack trace:", e );
                        LOG.warn( "The file could not be read: '{}'.", e.getLocalizedMessage() );
                    }
                }
            } else if ( in.getLocalName().equals( "Fill" ) ) {
                final Pair<Fill, Continuation<Fill>> fill = parseFill( in );
                base.fill = fill.first;
                if ( fill.second != null ) {
                    // delay evaluation of the dynamic fill parts until feature evaluation
                    contn = new Continuation<Mark>( contn ) {
                        @Override
                        public void updateStep( Mark base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fill.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            } else if ( in.getLocalName().equals( "Stroke" ) ) {
                final Pair<Stroke, Continuation<Stroke>> stroke = parseStroke( in );
                base.stroke = stroke.first;
                if ( stroke.second != null ) {
                    contn = new Continuation<Mark>( contn ) {
                        @Override
                        public void updateStep( Mark base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            stroke.second.evaluate( base.stroke, f, evaluator );
                        }
                    };
                }
            } else if ( in.isStartElement() ) {
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
    }
    in.require( END_ELEMENT, null, "Mark" );
    return new Pair<Mark, Continuation<Mark>>( base, contn );
}
/**
 * Resolves the content the reader is positioned on: either an OnlineResource (resolved to a
 * stream + external URL, or a dynamic expression if the href is missing) or a base64
 * InlineContent (decoded to a stream).
 *
 * @param in
 *            reader positioned on an OnlineResource or InlineContent element
 * @return a triple of (input stream, external URL string, continuation for a dynamic href);
 *         any component may be null, and null is returned on failure or unknown elements
 * @throws XMLStreamException
 */
private Triple<InputStream, String, Continuation<StringBuffer>> getOnlineResourceOrInlineContent( XMLStreamReader in )
                        throws XMLStreamException {
    if ( in.getLocalName().equals( "OnlineResource" ) ) {
        String str = in.getAttributeValue( XLNNS, "href" );
        if ( str == null ) {
            // no static href: the element content is an expression evaluated per feature
            Continuation<StringBuffer> contn = updateOrContinue( in, "OnlineResource", new StringBuffer(),
                                                                 SBUPDATER, null ).second;
            return new Triple<InputStream, String, Continuation<StringBuffer>>( null, null, contn );
        }
        String strUrl = null;
        try {
            // resolve relative hrefs against the document's system id
            URL url = resolve( str, in );
            strUrl = url.toExternalForm();
            LOG.debug( "Loading from URL '{}'", url );
            in.nextTag();
            return new Triple<InputStream, String, Continuation<StringBuffer>>( url.openStream(), strUrl, null );
        } catch ( IOException e ) {
            LOG.debug( "Stack trace:", e );
            LOG.warn( "Could not retrieve content at URL '{}'.", str );
            return null;
        }
    } else if ( in.getLocalName().equals( "InlineContent" ) ) {
        // NOTE(review): a missing 'encoding' attribute would make format null and NPE on the
        // next line — confirm whether the schema guarantees its presence
        String format = in.getAttributeValue( null, "encoding" );
        if ( format.equalsIgnoreCase( "base64" ) ) {
            ByteArrayInputStream bis = new ByteArrayInputStream( Base64.decode( in.getElementText() ) );
            return new Triple<InputStream, String, Continuation<StringBuffer>>( bis, null, null );
        }
        if ( format.equalsIgnoreCase( "xml" ) ) {
            // TODO: xml-encoded inline content is not supported yet
        }
    } else if ( in.isStartElement() ) {
        Location loc = in.getLocation();
        LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                   new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
        skipElement( in );
    }
    return null;
}
/**
 * Parses an SE/SLD <code>ExternalGraphic</code> element. Non-SVG content with a static href is
 * decoded to a BufferedImage immediately; a dynamic href yields a continuation that loads (and
 * caches) the image per feature.
 *
 * @param in
 *            reader positioned on the ExternalGraphic start element; positioned on the
 *            corresponding end element afterwards
 * @return a triple of (decoded image or null, external URL or null, continuation for dynamic
 *         hrefs or null)
 * @throws IOException
 *             if the image content could not be read
 * @throws XMLStreamException
 */
private Triple<BufferedImage, String, Continuation<List<BufferedImage>>> parseExternalGraphic(
                                                                                               final XMLStreamReader in )
                        throws IOException, XMLStreamException {
    // TODO color replacement
    in.require( START_ELEMENT, null, "ExternalGraphic" );

    String format = null;
    BufferedImage img = null;
    String url = null;
    Triple<InputStream, String, Continuation<StringBuffer>> pair = null;
    Continuation<List<BufferedImage>> contn = null; // needs to be list to be updateable by reference...

    while ( !( in.isEndElement() && in.getLocalName().equals( "ExternalGraphic" ) ) ) {
        in.nextTag();
        if ( in.getLocalName().equals( "Format" ) ) {
            format = in.getElementText();
        } else if ( in.getLocalName().equals( "OnlineResource" ) || in.getLocalName().equals( "InlineContent" ) ) {
            pair = getOnlineResourceOrInlineContent( in );
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }

    try {
        if ( pair != null ) {
            // SVG content is not rasterized here; everything else is decoded right away
            if ( pair.first != null && format != null && ( format.toLowerCase().indexOf( "svg" ) == -1 ) ) {
                img = ImageIO.read( pair.first );
            }
            url = pair.second;

            final Continuation<StringBuffer> sbcontn = pair.third;

            if ( pair.third != null ) {
                // LRU-ish cache for images loaded via dynamic hrefs
                final LinkedHashMap<String, BufferedImage> cache = new LinkedHashMap<String, BufferedImage>( 256 ) {
                    private static final long serialVersionUID = -6847956873232942891L;

                    @Override
                    protected boolean removeEldestEntry( Map.Entry<String, BufferedImage> eldest ) {
                        return size() > 256; // yeah, hardcoded max size... TODO
                    }
                };
                contn = new Continuation<List<BufferedImage>>() {
                    @Override
                    public void updateStep( List<BufferedImage> base, Feature f, XPathEvaluator<Feature> evaluator ) {
                        StringBuffer sb = new StringBuffer();
                        sbcontn.evaluate( sb, f, evaluator );
                        String file = sb.toString();
                        if ( cache.containsKey( file ) ) {
                            base.add( cache.get( file ) );
                            return;
                        }
                        try {
                            BufferedImage i = ImageIO.read( resolve( file, in ) );
                            base.add( i );
                            cache.put( file, i );
                        } catch ( MalformedURLException e ) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        } catch ( IOException e ) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                };
            }
        }
    } finally {
        // the stream is legitimately null for dynamic hrefs/missing content, so guard
        // explicitly instead of relying on the catch to swallow an NPE
        if ( pair != null && pair.first != null ) {
            try {
                pair.first.close();
            } catch ( Exception e ) {
                LOG.trace( "Stack trace when closing input stream:", e );
            }
        }
    }

    return new Triple<BufferedImage, String, Continuation<List<BufferedImage>>>( img, url, contn );
}
/**
 * Parses an SE/SLD <code>Graphic</code> element: a Mark or ExternalGraphic plus Opacity, Size,
 * Rotation, AnchorPoint and Displacement.
 *
 * @param in
 *            reader positioned on the Graphic start element; positioned on the corresponding
 *            end element afterwards
 * @return a pair of the (statically evaluated) Graphic and a continuation for dynamic parts
 *         (null if everything was static)
 * @throws XMLStreamException
 */
private Pair<Graphic, Continuation<Graphic>> parseGraphic( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Graphic" );

    Graphic base = new Graphic();
    Continuation<Graphic> contn = null;

    while ( !( in.isEndElement() && in.getLocalName().equals( "Graphic" ) ) ) {
        in.nextTag();

        if ( in.getLocalName().equals( "Mark" ) ) {
            final Pair<Mark, Continuation<Mark>> pair = parseMark( in );

            if ( pair != null ) {
                base.mark = pair.first;
                if ( pair.second != null ) {
                    // delay evaluation of the dynamic mark parts until feature evaluation
                    contn = new Continuation<Graphic>( contn ) {
                        @Override
                        public void updateStep( Graphic base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            pair.second.evaluate( base.mark, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "ExternalGraphic" ) ) {
            try {
                final Triple<BufferedImage, String, Continuation<List<BufferedImage>>> p = parseExternalGraphic( in );
                if ( p.third != null ) {
                    // dynamic href: the image is resolved per feature via a one-element list
                    contn = new Continuation<Graphic>( contn ) {
                        @Override
                        public void updateStep( Graphic base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            LinkedList<BufferedImage> list = new LinkedList<BufferedImage>();
                            p.third.evaluate( list, f, evaluator );
                            base.image = list.poll();
                        }
                    };
                } else {
                    base.image = p.first;
                    base.imageURL = p.second;
                }
            } catch ( IOException e ) {
                LOG.debug( "Stack trace", e );
                LOG.warn( get( "R2D.EXTERNAL_GRAPHIC_NOT_LOADED" ),
                          new Object[] { in.getLocation().getLineNumber(), in.getLocation().getColumnNumber(),
                                         in.getLocation().getSystemId() } );
            }
        } else if ( in.getLocalName().equals( "Opacity" ) ) {
            contn = updateOrContinue( in, "Opacity", base, new Updater<Graphic>() {
                public void update( Graphic obj, String val ) {
                    obj.opacity = Double.parseDouble( val );
                }
            }, contn ).second;
        } else if ( in.getLocalName().equals( "Size" ) ) {
            contn = updateOrContinue( in, "Size", base, new Updater<Graphic>() {
                public void update( Graphic obj, String val ) {
                    obj.size = Double.parseDouble( val );
                }
            }, contn ).second;
        } else if ( in.getLocalName().equals( "Rotation" ) ) {
            contn = updateOrContinue( in, "Rotation", base, new Updater<Graphic>() {
                public void update( Graphic obj, String val ) {
                    obj.rotation = Double.parseDouble( val );
                }
            }, contn ).second;
        } else if ( in.getLocalName().equals( "AnchorPoint" ) ) {
            while ( !( in.isEndElement() && in.getLocalName().equals( "AnchorPoint" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "AnchorPointX" ) ) {
                    contn = updateOrContinue( in, "AnchorPointX", base, new Updater<Graphic>() {
                        public void update( Graphic obj, String val ) {
                            obj.anchorPointX = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.getLocalName().equals( "AnchorPointY" ) ) {
                    contn = updateOrContinue( in, "AnchorPointY", base, new Updater<Graphic>() {
                        public void update( Graphic obj, String val ) {
                            obj.anchorPointY = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.isStartElement() ) {
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
            }
        } else if ( in.getLocalName().equals( "Displacement" ) ) {
            while ( !( in.isEndElement() && in.getLocalName().equals( "Displacement" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "DisplacementX" ) ) {
                    contn = updateOrContinue( in, "DisplacementX", base, new Updater<Graphic>() {
                        public void update( Graphic obj, String val ) {
                            obj.displacementX = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.getLocalName().equals( "DisplacementY" ) ) {
                    contn = updateOrContinue( in, "DisplacementY", base, new Updater<Graphic>() {
                        public void update( Graphic obj, String val ) {
                            obj.displacementY = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.isStartElement() ) {
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
            }
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }

    in.require( END_ELEMENT, null, "Graphic" );
    return new Pair<Graphic, Continuation<Graphic>>( base, contn );
}
/**
 * Parses a <code>PointSymbolizer</code> element.
 *
 * @param in
 *            reader positioned on the PointSymbolizer start element; positioned on the
 *            corresponding end element afterwards
 * @param uom
 *            unit of measure for the resulting styling
 * @return a new symbolizer wrapping the point styling (and a continuation if the contained
 *         Graphic has dynamic parts)
 * @throws XMLStreamException
 */
public Symbolizer<PointStyling> parsePointSymbolizer( XMLStreamReader in, UOM uom )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "PointSymbolizer" );

    Common common = new Common( in.getLocation() );
    PointStyling baseOrEvaluated = new PointStyling();
    baseOrEvaluated.uom = uom;

    while ( !( in.isEndElement() && in.getLocalName().equals( "PointSymbolizer" ) ) ) {
        in.nextTag();

        checkCommon( common, in );

        if ( in.getLocalName().equals( "Graphic" ) ) {
            final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );

            if ( pair == null ) {
                // no graphic could be parsed: return a symbolizer with default styling
                return new Symbolizer<PointStyling>( baseOrEvaluated, common.geometry, common.name, common.loc,
                                                     common.line, common.col );
            }

            baseOrEvaluated.graphic = pair.first;

            if ( pair.second != null ) {
                // dynamic graphic: return early with an evaluating symbolizer
                return new Symbolizer<PointStyling>( baseOrEvaluated, new Continuation<PointStyling>() {
                    @Override
                    public void updateStep( PointStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                        pair.second.evaluate( base.graphic, f, evaluator );
                    }
                }, common.geometry, null, common.loc, common.line, common.col );
            }
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }

    in.require( END_ELEMENT, null, "PointSymbolizer" );
    return new Symbolizer<PointStyling>( baseOrEvaluated, common.geometry, common.name, common.loc, common.line,
                                         common.col );
}
/**
 * Maps a uom attribute value to a UOM constant, matching on the suffix of the lowercased
 * value. Unknown values are logged and mapped to Pixel; null maps to Pixel silently.
 *
 * @param uom
 *            the attribute value, may be null
 * @return the corresponding unit of measure, Pixel by default
 */
static UOM getUOM( String uom ) {
    if ( uom == null ) {
        return Pixel;
    }
    String u = uom.toLowerCase();
    if ( u.endsWith( "metre" ) || u.endsWith( "meter" ) ) {
        return Metre;
    }
    if ( u.endsWith( "mm" ) ) {
        return mm;
    }
    if ( u.endsWith( "foot" ) ) {
        return Foot;
    }
    if ( !u.endsWith( "pixel" ) ) {
        LOG.warn( "Unknown unit of measure '{}', using pixel instead.", uom );
    }
    return Pixel;
}
/**
 * Dispatches to the specific symbolizer parser based on the current element's local name
 * (Point/Line/Polygon/Raster/TextSymbolizer), extracting the uom attribute first.
 *
 * @param in
 *            reader positioned on a *Symbolizer start element
 * @return a triple of (symbolizer, label continuation, xml snippet) — the latter two are only
 *         populated for text symbolizers; null if the element was not a known symbolizer
 * @throws XMLStreamException
 */
public Triple<Symbolizer<?>, Continuation<StringBuffer>, String> parseSymbolizer( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, null );
    if ( in.getLocalName().endsWith( "Symbolizer" ) ) {
        UOM uom = getUOM( in.getAttributeValue( null, "uom" ) );
        if ( in.getLocalName().equals( "PointSymbolizer" ) ) {
            return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>( parsePointSymbolizer( in, uom ),
                                                                                  null, null );
        }
        if ( in.getLocalName().equals( "LineSymbolizer" ) ) {
            return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>( parseLineSymbolizer( in, uom ),
                                                                                  null, null );
        }
        if ( in.getLocalName().equals( "PolygonSymbolizer" ) ) {
            return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>(
                                                                                  parsePolygonSymbolizer( in, uom ),
                                                                                  null, null );
        }
        if ( in.getLocalName().equals( "RasterSymbolizer" ) ) {
            return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>( parseRasterSymbolizer( in, uom ),
                                                                                  null, null );
        }
        if ( in.getLocalName().equals( "TextSymbolizer" ) ) {
            // raw-typed cast adapts parseTextSymbolizer's return type to the generic triple
            return (Triple) parseTextSymbolizer( in, uom );
        }
        Location loc = in.getLocation();
        LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                   new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
        skipElement( in );
    }
    return null;
}
/**
 * Parses a <code>RasterSymbolizer</code> element (Opacity, ChannelSelection, OverlapBehavior,
 * ColorMap, ContrastEnhancement, ShadedRelief, ImageOutline), handling both SE 1.1.0 and SLD
 * 1.0.0 variants where they differ.
 *
 * @param in
 *            reader positioned on the RasterSymbolizer start element; positioned on the
 *            corresponding end element afterwards
 * @param uom
 *            unit of measure for the resulting styling
 * @return the symbolizer wrapping the raster styling
 * @throws XMLStreamException
 */
public Symbolizer<RasterStyling> parseRasterSymbolizer( XMLStreamReader in, UOM uom )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "RasterSymbolizer" );
    Common common = new Common( in.getLocation() );
    RasterStyling baseOrEvaluated = new RasterStyling();
    baseOrEvaluated.uom = uom;
    Continuation<RasterStyling> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "RasterSymbolizer" ) ) ) {
        in.nextTag();

        checkCommon( common, in );

        if ( in.getLocalName().equals( "Opacity" ) ) {
            contn = updateOrContinue( in, "Opacity", baseOrEvaluated, new Updater<RasterStyling>() {
                @Override
                public void update( RasterStyling obj, String val ) {
                    obj.opacity = Double.parseDouble( val );
                }
            }, contn ).second;
        } else if ( in.getLocalName().equals( "ChannelSelection" ) ) {
            // per-channel source names plus optional per-channel contrast enhancements
            String red = null, green = null, blue = null, gray = null;
            HashMap<String, ContrastEnhancement> enhancements = new HashMap<String, ContrastEnhancement>( 10 );
            while ( !( in.isEndElement() && in.getLocalName().equals( "ChannelSelection" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "RedChannel" ) ) {
                    in.nextTag();
                    in.require( START_ELEMENT, null, "SourceChannelName" );
                    red = in.getElementText();
                    in.nextTag();
                    ContrastEnhancement enh = parseContrastEnhancement( in );
                    if ( enh != null ) {
                        enhancements.put( "red", enh );
                    }
                    in.nextTag();
                } else if ( in.getLocalName().equals( "GreenChannel" ) ) {
                    in.nextTag();
                    in.require( START_ELEMENT, null, "SourceChannelName" );
                    green = in.getElementText();
                    in.nextTag();
                    ContrastEnhancement enh = parseContrastEnhancement( in );
                    if ( enh != null ) {
                        enhancements.put( "green", enh );
                    }
                    in.nextTag();
                } else if ( in.getLocalName().equals( "BlueChannel" ) ) {
                    in.nextTag();
                    in.require( START_ELEMENT, null, "SourceChannelName" );
                    blue = in.getElementText();
                    in.nextTag();
                    ContrastEnhancement enh = parseContrastEnhancement( in );
                    if ( enh != null ) {
                        enhancements.put( "blue", enh );
                    }
                    in.nextTag();
                } else if ( in.getLocalName().equals( "GrayChannel" ) ) {
                    in.nextTag();
                    in.require( START_ELEMENT, null, "SourceChannelName" );
                    gray = in.getElementText();
                    in.nextTag();
                    ContrastEnhancement enh = parseContrastEnhancement( in );
                    if ( enh != null ) {
                        enhancements.put( "gray", enh );
                    }
                    in.nextTag();
                } else if ( in.isStartElement() ) {
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
            }
            baseOrEvaluated.channelSelection = new RasterChannelSelection( red, green, blue, gray, enhancements );
        } else if ( in.getLocalName().equals( "OverlapBehavior" ) ) {
            // actual difference between SLD 1.0.0/SE 1.1.0: SE uses element text, SLD a
            // nested marker element
            if ( in.getNamespaceURI().equals( SENS ) ) {
                baseOrEvaluated.overlap = Overlap.valueOf( in.getElementText() );
            } else {
                in.nextTag();
                baseOrEvaluated.overlap = Overlap.valueOf( in.getLocalName() );
                in.nextTag();
                in.nextTag();
            }
        } else if ( in.getLocalName().equals( "ColorMap" ) ) {
            if ( in.getNamespaceURI().equals( SENS ) ) {
                // SE 1.1.0: either a Categorize or an Interpolate function
                in.nextTag();
                if ( in.getLocalName().equals( "Categorize" ) ) {
                    baseOrEvaluated.categorize = new Categorize().parse( in );
                } else if ( in.getLocalName().equals( "Interpolate" ) ) {
                    baseOrEvaluated.interpolate = new Interpolate().parse( in );
                } else if ( in.isStartElement() ) {
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
                in.nextTag();
            } else {
                // SLD 1.0.0 color map
                baseOrEvaluated.interpolate = Interpolate.parseSLD100( in );
            }
        } else if ( in.getLocalName().equals( "ContrastEnhancement" ) ) {
            baseOrEvaluated.contrastEnhancement = parseContrastEnhancement( in );
        } else if ( in.getLocalName().equals( "ShadedRelief" ) ) {
            baseOrEvaluated.shaded = new ShadedRelief();
            while ( !( in.isEndElement() && in.getLocalName().equals( "ShadedRelief" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "BrightnessOnly" ) ) {
                    baseOrEvaluated.shaded.brightnessOnly = getElementTextAsBoolean( in );
                }
                if ( in.getLocalName().equals( "ReliefFactor" ) ) {
                    baseOrEvaluated.shaded.reliefFactor = parseDouble( in.getElementText() );
                }
                if ( in.getLocalName().equals( "AzimuthAngle" ) ) {
                    baseOrEvaluated.shaded.azimuthAngle = parseDouble( in.getElementText() );
                }
                if ( in.getLocalName().equals( "IlluminationAngle" ) ) {
                    baseOrEvaluated.shaded.alt = parseDouble( in.getElementText() );
                }
            }
        } else if ( in.getLocalName().equals( "ImageOutline" ) ) {
            // the outline is itself a full line or polygon symbolizer
            in.nextTag();
            if ( in.getLocalName().equals( "LineSymbolizer" ) ) {
                baseOrEvaluated.imageOutline = parseLineSymbolizer( in,
                                                                    getUOM( in.getAttributeValue( null, "uom" ) ) );
            }
            if ( in.getLocalName().equals( "PolygonSymbolizer" ) ) {
                baseOrEvaluated.imageOutline = parsePolygonSymbolizer( in, getUOM( in.getAttributeValue( null,
                                                                                                         "uom" ) ) );
            }
            in.nextTag();
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }
    in.require( END_ELEMENT, null, "RasterSymbolizer" );
    return new Symbolizer<RasterStyling>( baseOrEvaluated, contn, common.geometry, common.name, common.loc,
                                          common.line, common.col );
}
/**
 * Parses a <code>ContrastEnhancement</code> element, if the reader is positioned on one.
 *
 * @param in
 *            the reader; nothing is consumed if it is not positioned on a ContrastEnhancement
 *            element
 * @return the parsed enhancement, or null if the reader was not positioned on one
 * @throws XMLStreamException
 */
private ContrastEnhancement parseContrastEnhancement( XMLStreamReader in )
                        throws XMLStreamException {
    if ( !in.getLocalName().equals( "ContrastEnhancement" ) ) {
        return null;
    }
    ContrastEnhancement enhancement = new ContrastEnhancement();
    while ( !( in.isEndElement() && in.getLocalName().equals( "ContrastEnhancement" ) ) ) {
        in.nextTag();
        String elem = in.getLocalName();
        if ( elem.equals( "Normalize" ) ) {
            // Normalize is an empty element, so skip past it right away
            in.nextTag();
            enhancement.normalize = true;
        } else if ( elem.equals( "Histogram" ) ) {
            enhancement.histogram = true;
        } else if ( elem.equals( "GammaValue" ) ) {
            enhancement.gamma = parseDouble( in.getElementText() );
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { elem, loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }
    return enhancement;
}
/**
 * Parses a <code>LineSymbolizer</code> element (Stroke and PerpendicularOffset).
 *
 * @param in
 *            reader positioned on the LineSymbolizer start element; positioned on the
 *            corresponding end element afterwards
 * @param uom
 *            unit of measure for the resulting styling
 * @return the symbolizer wrapping the line styling (with a continuation only if dynamic parts
 *         were found)
 * @throws XMLStreamException
 */
public Symbolizer<LineStyling> parseLineSymbolizer( XMLStreamReader in, UOM uom )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "LineSymbolizer" );

    Common common = new Common( in.getLocation() );
    LineStyling baseOrEvaluated = new LineStyling();
    baseOrEvaluated.uom = uom;
    Continuation<LineStyling> contn = null;

    while ( !( in.isEndElement() && in.getLocalName().equals( "LineSymbolizer" ) ) ) {
        in.nextTag();

        checkCommon( common, in );

        if ( in.getLocalName().equals( "Stroke" ) ) {
            final Pair<Stroke, Continuation<Stroke>> pair = parseStroke( in );

            if ( pair != null ) {
                baseOrEvaluated.stroke = pair.first;

                if ( pair.second != null ) {
                    // delay evaluation of the dynamic stroke parts until feature evaluation
                    contn = new Continuation<LineStyling>( contn ) {
                        @Override
                        public void updateStep( LineStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            pair.second.evaluate( base.stroke, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "PerpendicularOffset" ) ) {
            baseOrEvaluated.perpendicularOffsetType = getPerpendicularOffsetType( in );
            contn = updateOrContinue( in, "PerpendicularOffset", baseOrEvaluated, new Updater<LineStyling>() {
                @Override
                public void update( LineStyling obj, String val ) {
                    obj.perpendicularOffset = Double.parseDouble( val );
                }
            }, contn ).second;
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }

    if ( contn == null ) {
        return new Symbolizer<LineStyling>( baseOrEvaluated, common.geometry, common.name, common.loc, common.line,
                                            common.col );
    }

    return new Symbolizer<LineStyling>( baseOrEvaluated, contn, common.geometry, common.name, common.loc,
                                        common.line, common.col );
}
/**
 * Parses an SE PolygonSymbolizer element into a {@link PolygonStyling} based symbolizer.
 *
 * @param in
 *            reader positioned at the PolygonSymbolizer start element
 * @param uom
 *            unit of measure to apply to the resulting styling
 * @return the symbolizer
 * @throws XMLStreamException
 */
public Symbolizer<PolygonStyling> parsePolygonSymbolizer( XMLStreamReader in, UOM uom )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "PolygonSymbolizer" );
    Common common = new Common( in.getLocation() );
    PolygonStyling baseOrEvaluated = new PolygonStyling();
    baseOrEvaluated.uom = uom;
    // chain of evaluation steps for expression-based properties; stays null if all values are static
    Continuation<PolygonStyling> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "PolygonSymbolizer" ) ) ) {
        in.nextTag();
        checkCommon( common, in );
        if ( in.getLocalName().equals( "Stroke" ) ) {
            final Pair<Stroke, Continuation<Stroke>> pair = parseStroke( in );
            if ( pair != null ) {
                baseOrEvaluated.stroke = pair.first;
                if ( pair.second != null ) {
                    // apply the stroke's dynamic parts to the styling's stroke at evaluation time
                    contn = new Continuation<PolygonStyling>( contn ) {
                        @Override
                        public void updateStep( PolygonStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            pair.second.evaluate( base.stroke, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "Fill" ) ) {
            final Pair<Fill, Continuation<Fill>> fillPair = parseFill( in );
            if ( fillPair != null ) {
                baseOrEvaluated.fill = fillPair.first;
                if ( fillPair.second != null ) {
                    // apply the fill's dynamic parts to the styling's fill at evaluation time
                    contn = new Continuation<PolygonStyling>( contn ) {
                        @Override
                        public void updateStep( PolygonStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fillPair.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "PerpendicularOffset" ) ) {
            baseOrEvaluated.perpendicularOffsetType = getPerpendicularOffsetType( in );
            contn = updateOrContinue( in, "PerpendicularOffset", baseOrEvaluated, new Updater<PolygonStyling>() {
                @Override
                public void update( PolygonStyling obj, String val ) {
                    obj.perpendicularOffset = Double.parseDouble( val );
                }
            }, contn ).second;
        } else if ( in.getLocalName().equals( "Displacement" ) ) {
            // Displacement wraps DisplacementX/DisplacementY child elements
            while ( !( in.isEndElement() && in.getLocalName().equals( "Displacement" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "DisplacementX" ) ) {
                    contn = updateOrContinue( in, "DisplacementX", baseOrEvaluated, new Updater<PolygonStyling>() {
                        @Override
                        public void update( PolygonStyling obj, String val ) {
                            obj.displacementX = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.getLocalName().equals( "DisplacementY" ) ) {
                    contn = updateOrContinue( in, "DisplacementY", baseOrEvaluated, new Updater<PolygonStyling>() {
                        @Override
                        public void update( PolygonStyling obj, String val ) {
                            obj.displacementY = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.isStartElement() ) {
                    // unknown element: log and skip its whole subtree
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
            }
        } else if ( in.isStartElement() ) {
            // unknown element: log and skip its whole subtree
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }
    if ( contn == null ) {
        return new Symbolizer<PolygonStyling>( baseOrEvaluated, common.geometry, common.name, common.loc,
                                               common.line, common.col );
    }
    return new Symbolizer<PolygonStyling>( baseOrEvaluated, contn, common.geometry, common.name, common.loc,
                                           common.line, common.col );
}
/**
 * Parses the content of the element whose local name ends with the given name: either plain text, or a mix of
 * text and filter expressions. Purely static text is applied to obj immediately via the updater; if any
 * expression is encountered, a continuation is created instead which concatenates static text and evaluated
 * expression results at styling evaluation time.
 *
 * @param <T>
 * @param in
 * @param name
 * @param obj
 * @param updater
 * @param contn
 * @return either contn, or a new continuation which updates obj, also the XML snippet (w/ filter expressions
 *         re-exported) which was parsed (or null, if none was parsed)
 * @throws XMLStreamException
 */
public <T> Pair<String, Continuation<T>> updateOrContinue( XMLStreamReader in, String name, T obj,
                                                           final Updater<T> updater, Continuation<T> contn )
                        throws XMLStreamException {
    // snippet buffer is only maintained when snippet collection is enabled (used for re-export)
    StringBuilder xmlText = collectXMLSnippets ? new StringBuilder() : null;
    if ( in.getLocalName().endsWith( name ) ) {
        // each list entry is EITHER a static text part (first != null) OR an expression plus its source
        // location string for error reporting (second != null)
        final LinkedList<Pair<String, Pair<Expression, String>>> text = new LinkedList<Pair<String, Pair<Expression, String>>>(); // no
        // real 'alternative', have we?
        boolean textOnly = true;
        while ( !( in.isEndElement() && in.getLocalName().endsWith( name ) ) ) {
            // next() (not nextTag()) so character data between expressions is seen
            in.next();
            if ( in.isStartElement() ) {
                Expression expr = parseExpression( in );
                if ( collectXMLSnippets ) {
                    // re-export the parsed expression so the snippet reflects what was actually understood
                    StringWriter sw = new StringWriter();
                    XMLStreamWriter out = XMLOutputFactory.newInstance().createXMLStreamWriter( sw );
                    Filter110XMLEncoder.export( expr, out );
                    xmlText.append( sw.toString() );
                }
                Pair<Expression, String> second;
                second = new Pair<Expression, String>( expr, get( "R2D.LINE", in.getLocation().getLineNumber(),
                                                                  in.getLocation().getColumnNumber(),
                                                                  in.getLocation().getSystemId() ) );
                text.add( new Pair<String, Pair<Expression, String>>( null, second ) );
                textOnly = false;
            }
            if ( in.isCharacters() ) {
                if ( collectXMLSnippets ) {
                    xmlText.append( in.getText() );
                }
                if ( textOnly && !text.isEmpty() ) { // concat text in case of multiple text nodes from
                    // beginning
                    String txt = text.removeLast().first;
                    text.add( new Pair<String, Pair<Expression, String>>( txt + in.getText().trim(), null ) );
                } else {
                    text.add( new Pair<String, Pair<Expression, String>>( in.getText().trim(), null ) );
                }
            }
        }
        in.require( END_ELEMENT, null, null );
        if ( textOnly ) {
            // purely static content: update the target object once, right now
            if ( text.isEmpty() ) {
                LOG.warn( "Expression was empty at line {}, column {}.", in.getLocation().getLineNumber(),
                          in.getLocation().getColumnNumber() );
            }
            updater.update( obj, text.isEmpty() ? "" : text.getFirst().first );
        } else {
            // dynamic content: defer concatenation/evaluation until a feature is available
            contn = new Continuation<T>( contn ) {
                @Override
                public void updateStep( T base, Feature f, XPathEvaluator<Feature> evaluator ) {
                    StringBuilder tmp = new StringBuilder();
                    for ( Pair<String, Pair<Expression, String>> p : text ) {
                        if ( p.first != null ) {
                            tmp.append( p.first );
                        }
                        if ( p.second != null ) {
                            try {
                                TypedObjectNode[] evald = p.second.first.evaluate( f, evaluator );
                                if ( evald.length == 0 ) {
                                    LOG.warn( get( "R2D.EXPRESSION_TO_NULL" ), p.second.second );
                                } else {
                                    // only the first evaluation result is used
                                    tmp.append( evald[0] );
                                }
                            } catch ( FilterEvaluationException e ) {
                                LOG.warn( get( "R2D.ERROR_EVAL" ), e.getLocalizedMessage(), p.second.second );
                            }
                        }
                    }
                    updater.update( base, tmp.toString() );
                }
            };
        }
    }
    return new Pair<String, Continuation<T>>( collectXMLSnippets ? xmlText.toString().trim() : null, contn );
}
/**
 * Parses an SE TextSymbolizer element into a {@link TextStyling} based symbolizer, together with the label
 * continuation and (if snippet collection is enabled) the label's XML source.
 *
 * @param in
 *            reader positioned at the TextSymbolizer start element
 * @param uom
 *            unit of measure to apply to the resulting styling
 * @return the symbolizer
 * @throws XMLStreamException
 */
public Triple<Symbolizer<TextStyling>, Continuation<StringBuffer>, String> parseTextSymbolizer( XMLStreamReader in,
                                                                                                UOM uom )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "TextSymbolizer" );
    Common common = new Common( in.getLocation() );
    TextStyling baseOrEvaluated = new TextStyling();
    baseOrEvaluated.uom = uom;
    // chain of evaluation steps for expression-based styling properties
    Continuation<TextStyling> contn = null;
    // continuation producing the label text (handled separately from the styling)
    Continuation<StringBuffer> label = null;
    // XML source of the Label content, only collected when snippet collection is enabled
    String xmlText = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "TextSymbolizer" ) ) ) {
        in.nextTag();
        checkCommon( common, in );
        if ( in.getLocalName().equals( "Label" ) ) {
            Pair<String, Continuation<StringBuffer>> res = updateOrContinue( in, "Label", new StringBuffer(),
                                                                             new Updater<StringBuffer>() {
                                                                                 @Override
                                                                                 public void update(
                                                                                                     StringBuffer obj,
                                                                                                     String val ) {
                                                                                     obj.append( val );
                                                                                 }
                                                                             }, null );
            xmlText = res.first;
            label = res.second;
        } else if ( in.getLocalName().equals( "LabelPlacement" ) ) {
            // NOTE(review): this loop uses equalsIgnoreCase for LabelPlacement/PointPlacement while the rest of
            // the parser uses equals — confirm whether case-insensitivity is intended here
            while ( !( in.isEndElement() && in.getLocalName().equalsIgnoreCase( "LabelPlacement" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equalsIgnoreCase( "PointPlacement" ) ) {
                    while ( !( in.isEndElement() && in.getLocalName().equals( "PointPlacement" ) ) ) {
                        in.nextTag();
                        if ( in.getLocalName().equals( "AnchorPoint" ) ) {
                            while ( !( in.isEndElement() && in.getLocalName().equals( "AnchorPoint" ) ) ) {
                                in.nextTag();
                                if ( in.getLocalName().equals( "AnchorPointX" ) ) {
                                    contn = updateOrContinue( in, "AnchorPointX", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.anchorPointX = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.getLocalName().equals( "AnchorPointY" ) ) {
                                    contn = updateOrContinue( in, "AnchorPointY", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.anchorPointY = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.isStartElement() ) {
                                    // unknown element: log and skip its whole subtree
                                    Location loc = in.getLocation();
                                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                               new Object[] { in.getLocalName(), loc.getLineNumber(),
                                                              loc.getColumnNumber() } );
                                    skipElement( in );
                                }
                            }
                        } else if ( in.getLocalName().equals( "Displacement" ) ) {
                            while ( !( in.isEndElement() && in.getLocalName().equals( "Displacement" ) ) ) {
                                in.nextTag();
                                if ( in.getLocalName().equals( "DisplacementX" ) ) {
                                    contn = updateOrContinue( in, "DisplacementX", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.displacementX = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.getLocalName().equals( "DisplacementY" ) ) {
                                    contn = updateOrContinue( in, "DisplacementY", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.displacementY = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.isStartElement() ) {
                                    // unknown element: log and skip its whole subtree
                                    Location loc = in.getLocation();
                                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                               new Object[] { in.getLocalName(), loc.getLineNumber(),
                                                              loc.getColumnNumber() } );
                                    skipElement( in );
                                }
                            }
                        } else if ( in.getLocalName().equals( "Rotation" ) ) {
                            contn = updateOrContinue( in, "Rotation", baseOrEvaluated, new Updater<TextStyling>() {
                                @Override
                                public void update( TextStyling obj, String val ) {
                                    obj.rotation = Double.parseDouble( val );
                                }
                            }, contn ).second;
                        } else if ( in.isStartElement() ) {
                            // unknown element: log and skip its whole subtree
                            Location loc = in.getLocation();
                            LOG.error(
                                       "Found unknown element '{}' at line {}, column {}, skipping.",
                                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                            skipElement( in );
                        }
                    }
                }
                if ( in.getLocalName().equals( "LinePlacement" ) ) {
                    final Pair<LinePlacement, Continuation<LinePlacement>> pair = parseLinePlacement( in );
                    if ( pair != null ) {
                        baseOrEvaluated.linePlacement = pair.first;
                        if ( pair.second != null ) {
                            // apply the line placement's dynamic parts at evaluation time
                            contn = new Continuation<TextStyling>( contn ) {
                                @Override
                                public void updateStep( TextStyling base, Feature f,
                                                        XPathEvaluator<Feature> evaluator ) {
                                    pair.second.evaluate( base.linePlacement, f, evaluator );
                                }
                            };
                        }
                    }
                }
            }
        } else if ( in.getLocalName().equals( "Halo" ) ) {
            final Pair<Halo, Continuation<Halo>> haloPair = parseHalo( in );
            if ( haloPair != null ) {
                baseOrEvaluated.halo = haloPair.first;
                if ( haloPair.second != null ) {
                    contn = new Continuation<TextStyling>( contn ) {
                        @Override
                        public void updateStep( TextStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            haloPair.second.evaluate( base.halo, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "Font" ) ) {
            final Pair<Font, Continuation<Font>> fontPair = parseFont( in );
            if ( fontPair != null ) {
                baseOrEvaluated.font = fontPair.first;
                if ( fontPair.second != null ) {
                    contn = new Continuation<TextStyling>( contn ) {
                        @Override
                        public void updateStep( TextStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fontPair.second.evaluate( base.font, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "Fill" ) ) {
            final Pair<Fill, Continuation<Fill>> fillPair = parseFill( in );
            if ( fillPair != null ) {
                baseOrEvaluated.fill = fillPair.first;
                if ( fillPair.second != null ) {
                    contn = new Continuation<TextStyling>( contn ) {
                        @Override
                        public void updateStep( TextStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fillPair.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.isStartElement() ) {
            // unknown element: log and skip its whole subtree
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }
    if ( contn == null ) {
        Symbolizer<TextStyling> sym = new Symbolizer<TextStyling>( baseOrEvaluated, common.geometry, common.name,
                                                                   common.loc, common.line, common.col );
        return new Triple<Symbolizer<TextStyling>, Continuation<StringBuffer>, String>( sym, label, xmlText );
    }
    Symbolizer<TextStyling> sym = new Symbolizer<TextStyling>( baseOrEvaluated, contn, common.geometry,
                                                               common.name, common.loc, common.line, common.col );
    return new Triple<Symbolizer<TextStyling>, Continuation<StringBuffer>, String>( sym, label, xmlText );
}
/**
 * Parses an SE Font element. Font properties are given as Svg/CssParameter children whose 'name' attribute
 * selects the property (font-family, font-style, font-weight, font-size).
 *
 * @param in
 *            reader positioned at the Font start element
 * @return the parsed font together with a continuation for dynamic parameters (continuation may be null)
 * @throws XMLStreamException
 */
private Pair<Font, Continuation<Font>> parseFont( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Font" );
    Font baseOrEvaluated = new Font();
    Continuation<Font> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "Font" ) ) ) {
        in.nextTag();
        // endsWith matches both SvgParameter (SE) and CssParameter (SLD)
        if ( in.getLocalName().endsWith( "Parameter" ) ) {
            String name = in.getAttributeValue( null, "name" );
            // NOTE(review): a Parameter element without a 'name' attribute would NPE below — presumably
            // schema-valid input always carries it; confirm
            if ( name.equals( "font-family" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        obj.fontFamily.add( val );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-style" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        obj.fontStyle = Style.valueOf( val.toUpperCase() );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-weight" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        obj.bold = val.equalsIgnoreCase( "bold" );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-size" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        obj.fontSize = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-color" ) ) {
                // deprecated non-standard parameter: consume and warn
                skipElement( in );
                LOG.warn( "The non-standard font-color Svg/CssParameter is not supported any more. Use a standard Fill element instead." );
            } else {
                // unknown parameter: consume its text content and warn
                in.getElementText();
                LOG.warn( "The non-standard '{}' Svg/CssParameter is not supported.", name );
            }
        }
    }
    return new Pair<Font, Continuation<Font>>( baseOrEvaluated, contn );
}
/**
 * Parses an SE Halo element (Radius and Fill children).
 *
 * @param in
 *            reader positioned at the Halo start element
 * @return the parsed halo together with a continuation for dynamic parts (continuation may be null)
 * @throws XMLStreamException
 */
private Pair<Halo, Continuation<Halo>> parseHalo( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Halo" );
    Halo baseOrEvaluated = new Halo();
    Continuation<Halo> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "Halo" ) ) ) {
        in.nextTag();
        if ( in.getLocalName().equals( "Radius" ) ) {
            contn = updateOrContinue( in, "Radius", baseOrEvaluated, new Updater<Halo>() {
                @Override
                public void update( Halo obj, String val ) {
                    obj.radius = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "Fill" ) ) {
            final Pair<Fill, Continuation<Fill>> fillPair = parseFill( in );
            if ( fillPair != null ) {
                baseOrEvaluated.fill = fillPair.first;
                if ( fillPair.second != null ) {
                    // apply the fill's dynamic parts to the halo's fill at evaluation time
                    contn = new Continuation<Halo>( contn ) {
                        @Override
                        public void updateStep( Halo base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fillPair.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            }
        }
    }
    return new Pair<Halo, Continuation<Halo>>( baseOrEvaluated, contn );
}
/**
 * Reads the optional 'type' and 'substraction' attributes from the element the reader is currently positioned at
 * and converts them into a {@link PerpendicularOffsetType}. Unparseable attribute values are logged and ignored,
 * so the corresponding defaults stay in effect.
 *
 * @param in
 *            reader positioned at an element carrying perpendicular-offset attributes
 * @return a perpendicular offset type, never null
 */
private static PerpendicularOffsetType getPerpendicularOffsetType( XMLStreamReader in ) {
    PerpendicularOffsetType result = new PerpendicularOffsetType();
    String typeAttr = in.getAttributeValue( null, "type" );
    if ( typeAttr != null ) {
        try {
            result.type = Type.valueOf( typeAttr );
        } catch ( IllegalArgumentException e ) {
            LOG.debug( "Stack trace:", e );
            LOG.warn( "The value '{}' is not a valid type for perpendicular offsets. Valid types are: {}",
                      typeAttr, Arrays.toString( Type.values() ) );
        }
    }
    String substractionAttr = in.getAttributeValue( null, "substraction" );
    if ( substractionAttr != null ) {
        try {
            result.substraction = Substraction.valueOf( substractionAttr );
        } catch ( IllegalArgumentException e ) {
            LOG.debug( "Stack trace:", e );
            LOG.warn( "The value '{}' is not a valid substraction type for perpendicular offsets."
                      + " Valid types are: {}", substractionAttr, Arrays.toString( Substraction.values() ) );
        }
    }
    return result;
}
/**
 * Parses an SE LinePlacement element (PerpendicularOffset, InitialGap, Gap, GeneralizeLine, IsAligned,
 * IsRepeated children).
 *
 * @param in
 *            reader positioned at the LinePlacement start element
 * @return the parsed line placement together with a continuation for dynamic parts (continuation may be null)
 * @throws XMLStreamException
 */
private Pair<LinePlacement, Continuation<LinePlacement>> parseLinePlacement( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "LinePlacement" );
    LinePlacement baseOrEvaluated = new LinePlacement();
    Continuation<LinePlacement> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "LinePlacement" ) ) ) {
        in.nextTag();
        // successive independent ifs (not else-if): after updateOrContinue the reader sits on the consumed
        // element's end tag, whose name cannot match any of the following checks
        if ( in.getLocalName().equals( "PerpendicularOffset" ) ) {
            baseOrEvaluated.perpendicularOffsetType = getPerpendicularOffsetType( in );
            contn = updateOrContinue( in, "PerpendicularOffset", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.perpendicularOffset = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "InitialGap" ) ) {
            contn = updateOrContinue( in, "InitialGap", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.initialGap = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "Gap" ) ) {
            contn = updateOrContinue( in, "Gap", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.gap = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "GeneralizeLine" ) ) {
            contn = updateOrContinue( in, "GeneralizeLine", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.generalizeLine = Boolean.parseBoolean( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "IsAligned" ) ) {
            contn = updateOrContinue( in, "IsAligned", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.isAligned = Boolean.parseBoolean( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "IsRepeated" ) ) {
            contn = updateOrContinue( in, "IsRepeated", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.repeat = Boolean.parseBoolean( val );
                }
            }, contn ).second;
        }
    }
    return new Pair<LinePlacement, Continuation<LinePlacement>>( baseOrEvaluated, contn );
}
/**
 * Parses a complete symbology document: either a single *Symbolizer element or a
 * FeatureTypeStyle/CoverageStyle document.
 *
 * @param in
 * @return null, if no symbolizer and no Feature type style was found
 * @throws XMLStreamException
 */
public org.deegree.rendering.r2d.se.unevaluated.Style parse( XMLStreamReader in )
                        throws XMLStreamException {
    // position the reader on the document's root element if necessary
    if ( in.getEventType() == START_DOCUMENT ) {
        in.nextTag();
    }
    String rootName = in.getLocalName();
    if ( rootName.endsWith( "Symbolizer" ) ) {
        Triple<Symbolizer<?>, Continuation<StringBuffer>, String> parsed = parseSymbolizer( in );
        return new org.deegree.rendering.r2d.se.unevaluated.Style( parsed.first, parsed.second,
                                                                   parsed.first.getName(), parsed.third );
    }
    if ( rootName.equals( "FeatureTypeStyle" ) ) {
        return parseFeatureTypeOrCoverageStyle( in );
    }
    LOG.warn( "Symbology file '{}' did not contain symbolizer or feature type style.",
              in.getLocation().getSystemId() );
    return null;
}
/**
 * Parses a FeatureTypeStyle or CoverageStyle element (or an OnlineResource pointing to one) into an unevaluated
 * style: rules become filter continuations with their scale bounds, and label continuations are collected per
 * text symbolizer.
 *
 * @param in
 *            reader positioned at the FeatureTypeStyle/CoverageStyle or OnlineResource start element
 * @return a new style
 * @throws XMLStreamException
 */
public org.deegree.rendering.r2d.se.unevaluated.Style parseFeatureTypeOrCoverageStyle( XMLStreamReader in )
                        throws XMLStreamException {
    // indirection: the whole style may be referenced through an OnlineResource element
    if ( in.getLocalName().equals( "OnlineResource" ) ) {
        try {
            URL url = SymbologyParser.parseOnlineResource( in );
            XMLStreamReader newReader = XMLInputFactory.newInstance().createXMLStreamReader( url.toString(),
                                                                                             url.openStream() );
            return parseFeatureTypeOrCoverageStyle( newReader );
        } catch ( MalformedURLException e ) {
            LOG.warn( "An URL referencing a FeatureType or CoverageStyle could not be resolved." );
            LOG.debug( "Stack trace:", e );
        } catch ( FactoryConfigurationError e ) {
            LOG.warn( "An URL referencing a FeatureType or CoverageStyle could not be read." );
            LOG.debug( "Stack trace:", e );
        } catch ( IOException e ) {
            LOG.warn( "An URL referencing a FeatureType or CoverageStyle could not be read." );
            LOG.debug( "Stack trace:", e );
        }
    }
    // one entry per rule: the rule's filter continuation plus its min/max scale denominators
    LinkedList<Pair<Continuation<LinkedList<Symbolizer<?>>>, DoublePair>> result = new LinkedList<Pair<Continuation<LinkedList<Symbolizer<?>>>, DoublePair>>();
    // label continuations keyed by their text symbolizer
    HashMap<Symbolizer<TextStyling>, Continuation<StringBuffer>> labels = new HashMap<Symbolizer<TextStyling>, Continuation<StringBuffer>>();
    HashMap<Symbolizer<TextStyling>, String> labelXMLTexts = collectXMLSnippets ? new HashMap<Symbolizer<TextStyling>, String>()
                                                                               : null;
    Common common = new Common( in.getLocation() );
    QName featureTypeName = null;
    while ( !( in.isEndElement() && ( in.getLocalName().equals( "FeatureTypeStyle" ) || in.getLocalName().equals(
                                                                                                                  "CoverageStyle" ) ) ) ) {
        in.nextTag();
        checkCommon( common, in );
        // TODO unused
        if ( in.getLocalName().equals( "SemanticTypeIdentifier" ) ) {
            in.getElementText(); // AndThrowItAwayImmediately
        }
        if ( in.getLocalName().equals( "FeatureTypeName" ) ) {
            featureTypeName = getElementTextAsQName( in );
        }
        // TODO unused
        if ( in.getLocalName().equals( "CoverageName" ) ) {
            in.getElementText(); // AndThrowItAwayImmediately
        }
        if ( in.getLocalName().equals( "Rule" ) || in.getLocalName().equals( "OnlineResource" ) ) {
            // rules may also be stored remotely; in that case a separate reader is used for the rule content
            XMLStreamReader localReader = in;
            if ( in.getLocalName().equals( "OnlineResource" ) ) {
                try {
                    URL url = parseOnlineResource( in );
                    localReader = XMLInputFactory.newInstance().createXMLStreamReader( url.toString(),
                                                                                       url.openStream() );
                } catch ( IOException e ) {
                    LOG.warn( "Error '{}' while resolving/accessing remote Rule document.", e.getLocalizedMessage() );
                    LOG.debug( "Stack trace:", e );
                }
            }
            Common ruleCommon = new Common( in.getLocation() );
            double minScale = NEGATIVE_INFINITY;
            double maxScale = POSITIVE_INFINITY;
            Filter filter = null;
            LinkedList<Symbolizer<?>> syms = new LinkedList<Symbolizer<?>>();
            while ( !( localReader.isEndElement() && localReader.getLocalName().equals( "Rule" ) ) ) {
                localReader.nextTag();
                checkCommon( ruleCommon, localReader );
                if ( localReader.getLocalName().equals( "Filter" ) ) {
                    filter = Filter110XMLDecoder.parse( localReader );
                }
                if ( localReader.getLocalName().equals( "ElseFilter" ) ) {
                    // marker instance, detected by identity in FilterContinuation
                    filter = ELSEFILTER;
                    localReader.nextTag();
                }
                if ( localReader.getLocalName().equals( "MinScaleDenominator" ) ) {
                    minScale = parseDouble( localReader.getElementText() );
                }
                if ( localReader.getLocalName().equals( "MaxScaleDenominator" ) ) {
                    maxScale = parseDouble( localReader.getElementText() );
                }
                // TODO legendgraphic
                if ( localReader.isStartElement() && localReader.getLocalName().endsWith( "Symbolizer" ) ) {
                    Triple<Symbolizer<?>, Continuation<StringBuffer>, String> parsedSym = parseSymbolizer( localReader );
                    if ( parsedSym.second != null ) {
                        labels.put( (Symbolizer) parsedSym.first, parsedSym.second );
                    }
                    if ( collectXMLSnippets && parsedSym.third != null ) {
                        labelXMLTexts.put( (Symbolizer) parsedSym.first, parsedSym.third );
                    }
                    syms.add( parsedSym.first );
                }
            }
            FilterContinuation contn = new FilterContinuation( filter, syms, ruleCommon );
            DoublePair scales = new DoublePair( minScale, maxScale );
            result.add( new Pair<Continuation<LinkedList<Symbolizer<?>>>, DoublePair>( contn, scales ) );
        }
    }
    return new org.deegree.rendering.r2d.se.unevaluated.Style( result, labels, labelXMLTexts, common.name,
                                                               featureTypeName );
}
/**
 * Marker filter representing an SLD/SE ElseFilter. Evaluating it always yields false; the rule machinery
 * recognizes it by identity (comparison against {@link SymbologyParser#ELSEFILTER}) instead of evaluating it.
 */
static class ElseFilter implements Filter {

    @Override
    public Type getType() {
        return null;
    }

    @Override
    public <T> boolean evaluate( T object, XPathEvaluator<T> evaluator )
                            throws FilterEvaluationException {
        // never matches on its own; else semantics are handled by identity comparison elsewhere
        return false; // always to false, has to be checked differently, see FilterContinuation below
    }
}
/**
 * <code>FilterContinuation</code> — appends a rule's symbolizers to the list of applicable symbolizers whenever
 * the rule's filter matches (or no filter applies).
 *
 * @author <a href="mailto:[email protected]">Andreas Schmitz</a>
 * @author last edited by: $Author$
 *
 * @version $Revision$, $Date$
 */
public static class FilterContinuation extends Continuation<LinkedList<Symbolizer<?>>> {

    /***/
    public Filter filter;

    private LinkedList<Symbolizer<?>> syms;

    /** Contains description and so on. */
    public Common common;

    public FilterContinuation( Filter filter, LinkedList<Symbolizer<?>> syms, Common common ) {
        this.filter = filter;
        this.syms = syms;
        this.common = common;
    }

    @Override
    public void updateStep( LinkedList<Symbolizer<?>> base, Feature f, XPathEvaluator<Feature> evaluator ) {
        try {
            // a rule applies when it is unfiltered, when no feature is given, when its filter matches, or when
            // it is the else-rule and no other rule contributed symbolizers so far
            boolean applies = filter == null || f == null || filter.evaluate( f, evaluator )
                              || ( base.isEmpty() && filter == ELSEFILTER );
            if ( applies ) {
                base.addAll( syms );
            }
        } catch ( FilterEvaluationException e ) {
            LOG.warn( get( "R2D.ERROR_EVAL" ), e.getLocalizedMessage(), filter.toString() );
            LOG.debug( "Stack trace:", e );
        }
    }
}
/**
 * <code>Common</code> — holds the properties shared by symbolizers and styles: name, title, abstract, the
 * geometry expression and the source document location (for error reporting).
 *
 * @author <a href="mailto:[email protected]">Andreas Schmitz</a>
 * @author last edited by: $Author$
 *
 * @version $Revision$, $Date$
 */
public static class Common {

    public Common() {
        // without location
    }

    Common( Location loc ) {
        this.loc = loc.getSystemId();
        line = loc.getLineNumber();
        col = loc.getColumnNumber();
    }

    /***/
    public String name;

    /***/
    public String title;

    /***/
    public String abstract_;

    // expression selecting the geometry property to render
    Expression geometry;

    // system id of the source document (may be null)
    String loc;

    // position within the source document, for error reporting
    int line, col;
}
} | deegree-core/deegree-core-rendering-2d/src/main/java/org/deegree/rendering/r2d/se/parser/SymbologyParser.java | //$HeadURL$
/*----------------------------------------------------------------------------
This file is part of deegree, http://deegree.org/
Copyright (C) 2001-2009 by:
Department of Geography, University of Bonn
and
lat/lon GmbH
This library is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by the Free
Software Foundation; either version 2.1 of the License, or (at your option)
any later version.
This library is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Contact information:
lat/lon GmbH
Aennchenstr. 19, 53177 Bonn
Germany
http://lat-lon.de/
Department of Geography, University of Bonn
Prof. Dr. Klaus Greve
Postfach 1147, 53001 Bonn
Germany
http://www.geographie.uni-bonn.de/deegree/
e-mail: [email protected]
----------------------------------------------------------------------------*/
package org.deegree.rendering.r2d.se.parser;
import static java.awt.Font.TRUETYPE_FONT;
import static java.awt.Font.TYPE1_FONT;
import static java.awt.Font.createFont;
import static java.lang.Double.NEGATIVE_INFINITY;
import static java.lang.Double.POSITIVE_INFINITY;
import static java.lang.Double.parseDouble;
import static java.lang.Float.parseFloat;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static javax.xml.stream.XMLStreamConstants.END_ELEMENT;
import static javax.xml.stream.XMLStreamConstants.START_DOCUMENT;
import static javax.xml.stream.XMLStreamConstants.START_ELEMENT;
import static org.deegree.commons.utils.ArrayUtils.splitAsDoubles;
import static org.deegree.commons.utils.ColorUtils.decodeWithAlpha;
import static org.deegree.commons.xml.CommonNamespaces.SENS;
import static org.deegree.commons.xml.CommonNamespaces.XLNNS;
import static org.deegree.commons.xml.stax.StAXParsingHelper.getElementTextAsBoolean;
import static org.deegree.commons.xml.stax.StAXParsingHelper.getElementTextAsQName;
import static org.deegree.commons.xml.stax.StAXParsingHelper.resolve;
import static org.deegree.commons.xml.stax.StAXParsingHelper.skipElement;
import static org.deegree.filter.xml.Filter110XMLDecoder.parseExpression;
import static org.deegree.rendering.i18n.Messages.get;
import static org.deegree.rendering.r2d.se.unevaluated.Continuation.SBUPDATER;
import static org.deegree.rendering.r2d.styling.components.Stroke.LineCap.BUTT;
import static org.deegree.rendering.r2d.styling.components.Stroke.LineJoin.ROUND;
import static org.deegree.rendering.r2d.styling.components.UOM.Foot;
import static org.deegree.rendering.r2d.styling.components.UOM.Metre;
import static org.deegree.rendering.r2d.styling.components.UOM.Pixel;
import static org.deegree.rendering.r2d.styling.components.UOM.mm;
import static org.slf4j.LoggerFactory.getLogger;
import java.awt.Color;
import java.awt.FontFormatException;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.imageio.ImageIO;
import javax.xml.namespace.QName;
import javax.xml.stream.FactoryConfigurationError;
import javax.xml.stream.Location;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import org.apache.xerces.impl.dv.util.Base64;
import org.deegree.commons.tom.TypedObjectNode;
import org.deegree.commons.utils.DoublePair;
import org.deegree.commons.utils.Pair;
import org.deegree.commons.utils.Triple;
import org.deegree.commons.xml.stax.StAXParsingHelper;
import org.deegree.feature.Feature;
import org.deegree.filter.Expression;
import org.deegree.filter.Filter;
import org.deegree.filter.FilterEvaluationException;
import org.deegree.filter.XPathEvaluator;
import org.deegree.filter.expression.custom.se.Categorize;
import org.deegree.filter.expression.custom.se.Interpolate;
import org.deegree.filter.xml.Filter110XMLDecoder;
import org.deegree.filter.xml.Filter110XMLEncoder;
import org.deegree.rendering.r2d.RenderHelper;
import org.deegree.rendering.r2d.se.unevaluated.Continuation;
import org.deegree.rendering.r2d.se.unevaluated.Symbolizer;
import org.deegree.rendering.r2d.se.unevaluated.Continuation.Updater;
import org.deegree.rendering.r2d.styling.LineStyling;
import org.deegree.rendering.r2d.styling.PointStyling;
import org.deegree.rendering.r2d.styling.PolygonStyling;
import org.deegree.rendering.r2d.styling.RasterChannelSelection;
import org.deegree.rendering.r2d.styling.RasterStyling;
import org.deegree.rendering.r2d.styling.TextStyling;
import org.deegree.rendering.r2d.styling.RasterStyling.ContrastEnhancement;
import org.deegree.rendering.r2d.styling.RasterStyling.Overlap;
import org.deegree.rendering.r2d.styling.RasterStyling.ShadedRelief;
import org.deegree.rendering.r2d.styling.components.Fill;
import org.deegree.rendering.r2d.styling.components.Font;
import org.deegree.rendering.r2d.styling.components.Graphic;
import org.deegree.rendering.r2d.styling.components.Halo;
import org.deegree.rendering.r2d.styling.components.LinePlacement;
import org.deegree.rendering.r2d.styling.components.Mark;
import org.deegree.rendering.r2d.styling.components.PerpendicularOffsetType;
import org.deegree.rendering.r2d.styling.components.Stroke;
import org.deegree.rendering.r2d.styling.components.UOM;
import org.deegree.rendering.r2d.styling.components.Font.Style;
import org.deegree.rendering.r2d.styling.components.Mark.SimpleMark;
import org.deegree.rendering.r2d.styling.components.PerpendicularOffsetType.Substraction;
import org.deegree.rendering.r2d.styling.components.PerpendicularOffsetType.Type;
import org.deegree.rendering.r2d.styling.components.Stroke.LineCap;
import org.deegree.rendering.r2d.styling.components.Stroke.LineJoin;
import org.slf4j.Logger;
/**
* <code>SymbologyParser</code> parses the SE part of 1.1.0 and the corresponding SLD 1.0.0 part.
*
* @author <a href="mailto:[email protected]">Andreas Schmitz</a>
* @author last edited by: $Author$
*
* @version $Revision$, $Date$
*/
public class SymbologyParser {
    // If true, raw XML snippets of parsed expressions are kept for later re-export.
    private boolean collectXMLSnippets = false;
    // Shared logger for this parser (package-visible so nested updaters can use it).
    static final Logger LOG = getLogger( SymbologyParser.class );
    /**
     * A static elsefilter instance (think of it as a marker).
     */
    public static final ElseFilter ELSEFILTER = new ElseFilter();
    /**
     * A default instance.
     */
    public static final SymbologyParser INSTANCE = new SymbologyParser();
    /**
     * Constructs a parser which does not collect source snippets.
     */
    public SymbologyParser() {
        // default values
    }
    /**
     * @param collectXMLSnippets
     *            if true, some source snippets are collected (which can be used for re-export)
     */
    public SymbologyParser( boolean collectXMLSnippets ) {
        this.collectXMLSnippets = collectXMLSnippets;
    }
private static boolean require( XMLStreamReader in, String elementName ) {
if ( !( in.getLocalName().equals( elementName ) && in.isStartElement() ) ) {
Location loc = in.getLocation();
LOG.error( "Expected a '{}' element at line {} column {}.", new Object[] { elementName,
loc.getLineNumber(),
loc.getColumnNumber() } );
return false;
}
return true;
}
/**
* @param in
* @return the resolved href attribute
* @throws XMLStreamException
* @throws MalformedURLException
*/
public static URL parseOnlineResource( XMLStreamReader in )
throws XMLStreamException, MalformedURLException {
if ( !require( in, "OnlineResource" ) ) {
return null;
}
String url = in.getAttributeValue( null, "href" );
URL resolved = StAXParsingHelper.resolve( url, in );
in.nextTag();
in.require( END_ELEMENT, null, "OnlineResource" );
return resolved;
}
    /**
     * Reads the elements shared by all symbolizers (Name, Geometry, Description with Title/Abstract, and the
     * SLD 1.0.0 style top-level Title/Abstract) into the given Common object, if the reader is currently
     * positioned on one of them. Unknown elements inside a Description are logged and skipped.
     */
    private static void checkCommon( Common common, XMLStreamReader in )
                            throws XMLStreamException {
        if ( in.getLocalName().equals( "Name" ) ) {
            common.name = in.getElementText();
        }
        Location l = in.getLocation();
        if ( in.getLocalName().equals( "Geometry" ) ) {
            // remember where the geometry expression occurred, for later error reporting
            common.loc = l.getSystemId();
            common.line = l.getLineNumber();
            common.col = l.getColumnNumber();
            in.nextTag();
            common.geometry = parseExpression( in );
            in.nextTag();
            in.require( END_ELEMENT, null, "Geometry" );
        }
        if ( in.getLocalName().equals( "Description" ) ) {
            while ( !( in.isEndElement() && in.getLocalName().equals( "Description" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "Title" ) ) {
                    common.title = in.getElementText();
                } else if ( in.getLocalName().equals( "Abstract" ) ) {
                    common.abstract_ = in.getElementText();
                } else if ( in.isStartElement() ) {
                    // NOTE(review): 'l' was captured before entering the Description loop, so the reported
                    // line/column may not match the unknown element's actual position -- confirm intended.
                    Location loc = l;
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
            }
        }
        // in case of SLD 1.0.0:
        if ( in.getLocalName().equals( "Title" ) ) {
            common.title = in.getElementText();
            in.nextTag();
        }
        if ( in.getLocalName().equals( "Abstract" ) ) {
            common.abstract_ = in.getElementText();
            in.nextTag();
        }
    }
    /**
     * Parses an SE/SLD 'Fill' element into a Fill base object plus an optional Continuation that evaluates
     * the expression-based (per feature) parts.
     */
    private Pair<Fill, Continuation<Fill>> parseFill( XMLStreamReader in )
                            throws XMLStreamException {
        in.require( START_ELEMENT, null, "Fill" );
        Fill base = new Fill();
        Continuation<Fill> contn = null;
        while ( !( in.isEndElement() && in.getLocalName().equals( "Fill" ) ) ) {
            in.nextTag();
            if ( in.getLocalName().equals( "GraphicFill" ) ) {
                in.nextTag();
                final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );
                if ( pair != null ) {
                    base.graphic = pair.first;
                    if ( pair.second != null ) {
                        // chain the graphic's continuation into this fill's continuation
                        contn = new Continuation<Fill>( contn ) {
                            @Override
                            public void updateStep( Fill base, Feature f, XPathEvaluator<Feature> evaluator ) {
                                pair.second.evaluate( base.graphic, f, evaluator );
                            }
                        };
                    }
                }
                in.nextTag();
            } else if ( in.getLocalName().endsWith( "Parameter" ) ) {
                // matches both 'SvgParameter' (SE 1.1.0) and 'CssParameter' (SLD 1.0.0)
                // NOTE(review): a missing 'name' attribute causes a NullPointerException below -- confirm the
                // schema guarantees its presence.
                String cssName = in.getAttributeValue( null, "name" );
                if ( cssName.equals( "fill" ) ) {
                    contn = updateOrContinue( in, "Parameter", base, new Updater<Fill>() {
                        @Override
                        public void update( Fill obj, String val ) {
                            // keep alpha value
                            int alpha = obj.color.getAlpha();
                            obj.color = decodeWithAlpha( val );
                            obj.color = new Color( obj.color.getRed(), obj.color.getGreen(), obj.color.getBlue(), alpha );
                        }
                    }, contn ).second;
                }
                if ( cssName.equals( "fill-opacity" ) ) {
                    contn = updateOrContinue( in, "Parameter", base, new Updater<Fill>() {
                        @Override
                        public void update( Fill obj, String val ) {
                            // keep original color, clamp the opacity value to [0, 1]
                            float alpha = max( 0, min( 1, parseFloat( val ) ) );
                            float[] cols = obj.color.getRGBColorComponents( null );
                            obj.color = new Color( cols[0], cols[1], cols[2], alpha );
                        }
                    }, contn ).second;
                }
            } else if ( in.isStartElement() ) {
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
        }
        in.require( END_ELEMENT, null, "Fill" );
        return new Pair<Fill, Continuation<Fill>>( base, contn );
    }
private Pair<Stroke, Continuation<Stroke>> parseStroke( XMLStreamReader in )
throws XMLStreamException {
in.require( START_ELEMENT, null, "Stroke" );
Stroke base = new Stroke();
Continuation<Stroke> contn = null;
while ( !( in.isEndElement() && in.getLocalName().equals( "Stroke" ) ) ) {
in.nextTag();
if ( in.getLocalName().endsWith( "Parameter" ) ) {
String name = in.getAttributeValue( null, "name" );
if ( name.equals( "stroke" ) ) {
contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
// keep alpha value
int alpha = obj.color.getAlpha();
obj.color = decodeWithAlpha( val );
obj.color = new Color( obj.color.getRed(), obj.color.getGreen(), obj.color.getBlue(), alpha );
}
}, contn ).second;
} else if ( name.equals( "stroke-opacity" ) ) {
contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
// keep original color
float alpha = Float.parseFloat( val );
float[] cols = obj.color.getRGBColorComponents( null );
obj.color = new Color( cols[0], cols[1], cols[2], alpha );
}
}, contn ).second;
} else if ( name.equals( "stroke-width" ) ) {
contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
obj.width = Double.parseDouble( val );
}
}, contn ).second;
} else if ( name.equals( "stroke-linejoin" ) ) {
contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
try {
obj.linejoin = LineJoin.valueOf( val.toUpperCase() );
} catch ( IllegalArgumentException e ) {
LOG.warn( "Used invalid value '{}' for line join.", val );
obj.linejoin = ROUND;
}
}
}, contn ).second;
} else if ( name.equals( "stroke-linecap" ) ) {
contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
try {
obj.linecap = LineCap.valueOf( val.toUpperCase() );
} catch ( IllegalArgumentException e ) {
LOG.warn( "Used invalid value '{}' for line cap.", val );
obj.linecap = BUTT;
}
}
}, contn ).second;
} else if ( name.equals( "stroke-dasharray" ) ) {
contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
// , is not strictly allowed, but we don't lose anything by being flexible
if ( val.contains( "," ) ) {
obj.dasharray = splitAsDoubles( val, "," );
} else {
obj.dasharray = splitAsDoubles( val, " " );
}
}
}, contn ).second;
} else if ( name.equals( "stroke-dashoffset" ) ) {
contn = updateOrContinue( in, "Parameter", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
obj.dashoffset = Double.parseDouble( val );
}
}, contn ).second;
} else {
Location loc = in.getLocation();
LOG.error( "Found unknown parameter '{}' at line {}, column {}, skipping.",
new Object[] { name, loc.getLineNumber(), loc.getColumnNumber() } );
skipElement( in );
}
in.require( END_ELEMENT, null, null );
} else if ( in.getLocalName().equals( "GraphicFill" ) ) {
in.nextTag();
final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );
if ( pair != null ) {
base.fill = pair.first;
if ( pair.second != null ) {
contn = new Continuation<Stroke>( contn ) {
@Override
public void updateStep( Stroke base, Feature f, XPathEvaluator<Feature> evaluator ) {
pair.second.evaluate( base.fill, f, evaluator );
}
};
}
}
in.require( END_ELEMENT, null, "Graphic" );
in.nextTag();
in.require( END_ELEMENT, null, "GraphicFill" );
} else if ( in.getLocalName().equals( "GraphicStroke" ) ) {
while ( !( in.isEndElement() && in.getLocalName().equals( "GraphicStroke" ) ) ) {
in.nextTag();
if ( in.getLocalName().equals( "Graphic" ) ) {
final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );
if ( pair != null ) {
base.stroke = pair.first;
if ( pair.second != null ) {
contn = new Continuation<Stroke>( contn ) {
@Override
public void updateStep( Stroke base, Feature f, XPathEvaluator<Feature> evaluator ) {
pair.second.evaluate( base.stroke, f, evaluator );
}
};
}
}
in.require( END_ELEMENT, null, "Graphic" );
} else if ( in.getLocalName().equals( "InitialGap" ) ) {
contn = updateOrContinue( in, "InitialGap", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
obj.strokeInitialGap = Double.parseDouble( val );
}
}, contn ).second;
in.require( END_ELEMENT, null, "InitialGap" );
} else if ( in.getLocalName().equals( "Gap" ) ) {
contn = updateOrContinue( in, "Gap", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
obj.strokeGap = Double.parseDouble( val );
}
}, contn ).second;
in.require( END_ELEMENT, null, "Gap" );
} else if ( in.getLocalName().equals( "PositionPercentage" ) ) {
contn = updateOrContinue( in, "PositionPercentage", base, new Updater<Stroke>() {
@Override
public void update( Stroke obj, String val ) {
obj.positionPercentage = Double.parseDouble( val );
}
}, contn ).second;
in.require( END_ELEMENT, null, "PositionPercentage" );
} else if ( in.isStartElement() ) {
Location loc = in.getLocation();
LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
skipElement( in );
}
}
} else if ( in.isStartElement() ) {
LOG.error( "Found unknown element '{}', skipping.", in.getLocalName() );
skipElement( in );
}
}
in.require( END_ELEMENT, null, "Stroke" );
return new Pair<Stroke, Continuation<Stroke>>( base, contn );
}
    /**
     * Parses an SE/SLD 'Mark' element (well-known name, or a glyph/shape from an external font/SVG file, plus
     * fill and stroke) into a Mark base object and an optional Continuation for the expression-based parts.
     */
    private Pair<Mark, Continuation<Mark>> parseMark( XMLStreamReader in )
                            throws XMLStreamException {
        in.require( START_ELEMENT, null, "Mark" );
        Mark base = new Mark();
        Continuation<Mark> contn = null;
        in.nextTag();
        while ( !( in.isEndElement() && in.getLocalName().equals( "Mark" ) ) ) {
            if ( in.isEndElement() ) {
                in.nextTag();
            }
            if ( in.getLocalName().equals( "WellKnownName" ) ) {
                String wkn = in.getElementText();
                try {
                    base.wellKnown = SimpleMark.valueOf( wkn.toUpperCase() );
                } catch ( IllegalArgumentException e ) {
                    LOG.warn( "Specified unsupported WellKnownName of '{}', using square instead.", wkn );
                    base.wellKnown = SimpleMark.SQUARE;
                }
            } else
                // labeled statement: 'break sym' aborts loading the external mark without leaving the loop
                sym: if ( in.getLocalName().equals( "OnlineResource" ) || in.getLocalName().equals( "InlineContent" ) ) {
                    LOG.debug( "Loading mark from external file." );
                    Triple<InputStream, String, Continuation<StringBuffer>> pair = getOnlineResourceOrInlineContent( in );
                    if ( pair == null ) {
                        in.nextTag();
                        break sym;
                    }
                    InputStream is = pair.first;
                    in.nextTag();
                    in.require( START_ELEMENT, null, "Format" );
                    String format = in.getElementText();
                    in.require( END_ELEMENT, null, "Format" );
                    in.nextTag();
                    if ( in.getLocalName().equals( "MarkIndex" ) ) {
                        base.markIndex = Integer.parseInt( in.getElementText() );
                    }
                    if ( is != null ) {
                        try {
                            java.awt.Font font = null;
                            if ( format.equalsIgnoreCase( "ttf" ) ) {
                                font = createFont( TRUETYPE_FONT, is );
                            }
                            if ( format.equalsIgnoreCase( "type1" ) ) {
                                font = createFont( TYPE1_FONT, is );
                            }
                            if ( format.equalsIgnoreCase( "svg" ) ) {
                                base.shape = RenderHelper.getShapeFromSvg( is, pair.second );
                            }
                            if ( font == null && base.shape == null ) {
                                LOG.warn( "Mark was not loaded, because the format '{}' is not supported.", format );
                                break sym;
                            }
                            // NOTE(review): this rejects markIndex == getNumGlyphs() - 1, which looks like a
                            // valid glyph index -- possible off-by-one, confirm intended.
                            if ( font != null && base.markIndex >= font.getNumGlyphs() - 1 ) {
                                LOG.warn( "The font only contains {} glyphs, but the index given was {}.",
                                          font.getNumGlyphs(), base.markIndex );
                                break sym;
                            }
                            base.font = font;
                        } catch ( FontFormatException e ) {
                            LOG.debug( "Stack trace:", e );
                            LOG.warn( "The file was not a valid '{}' file: '{}'", format, e.getLocalizedMessage() );
                        } catch ( IOException e ) {
                            LOG.debug( "Stack trace:", e );
                            LOG.warn( "The file could not be read: '{}'.", e.getLocalizedMessage() );
                        }
                    }
                } else if ( in.getLocalName().equals( "Fill" ) ) {
                    final Pair<Fill, Continuation<Fill>> fill = parseFill( in );
                    base.fill = fill.first;
                    if ( fill.second != null ) {
                        contn = new Continuation<Mark>( contn ) {
                            @Override
                            public void updateStep( Mark base, Feature f, XPathEvaluator<Feature> evaluator ) {
                                fill.second.evaluate( base.fill, f, evaluator );
                            }
                        };
                    }
                } else if ( in.getLocalName().equals( "Stroke" ) ) {
                    final Pair<Stroke, Continuation<Stroke>> stroke = parseStroke( in );
                    base.stroke = stroke.first;
                    if ( stroke.second != null ) {
                        contn = new Continuation<Mark>( contn ) {
                            @Override
                            public void updateStep( Mark base, Feature f, XPathEvaluator<Feature> evaluator ) {
                                stroke.second.evaluate( base.stroke, f, evaluator );
                            }
                        };
                    }
                } else if ( in.isStartElement() ) {
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
        }
        in.require( END_ELEMENT, null, "Mark" );
        return new Pair<Mark, Continuation<Mark>>( base, contn );
    }
    /**
     * Reads either an 'OnlineResource' or an 'InlineContent' element. Returns a triple of an opened input
     * stream (null for dynamic resources), the resolved URL string (may be null) and, for dynamic online
     * resources, a continuation that produces the URL per feature. Returns null on retrieval failure or
     * unknown elements.
     */
    private Triple<InputStream, String, Continuation<StringBuffer>> getOnlineResourceOrInlineContent( XMLStreamReader in )
                            throws XMLStreamException {
        if ( in.getLocalName().equals( "OnlineResource" ) ) {
            String str = in.getAttributeValue( XLNNS, "href" );
            if ( str == null ) {
                // no static href: the location is given as an expression, to be evaluated per feature
                Continuation<StringBuffer> contn = updateOrContinue( in, "OnlineResource", new StringBuffer(),
                                                                     SBUPDATER, null ).second;
                return new Triple<InputStream, String, Continuation<StringBuffer>>( null, null, contn );
            }
            String strUrl = null;
            try {
                URL url = resolve( str, in );
                strUrl = url.toExternalForm();
                LOG.debug( "Loading from URL '{}'", url );
                in.nextTag();
                return new Triple<InputStream, String, Continuation<StringBuffer>>( url.openStream(), strUrl, null );
            } catch ( IOException e ) {
                LOG.debug( "Stack trace:", e );
                LOG.warn( "Could not retrieve content at URL '{}'.", str );
                return null;
            }
        } else if ( in.getLocalName().equals( "InlineContent" ) ) {
            // NOTE(review): a missing 'encoding' attribute causes a NullPointerException below -- confirm the
            // schema guarantees its presence.
            String format = in.getAttributeValue( null, "encoding" );
            if ( format.equalsIgnoreCase( "base64" ) ) {
                ByteArrayInputStream bis = new ByteArrayInputStream( Base64.decode( in.getElementText() ) );
                return new Triple<InputStream, String, Continuation<StringBuffer>>( bis, null, null );
            }
            if ( format.equalsIgnoreCase( "xml" ) ) {
                // TODO
            }
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
        return null;
    }
private Triple<BufferedImage, String, Continuation<List<BufferedImage>>> parseExternalGraphic(
final XMLStreamReader in )
throws IOException, XMLStreamException {
// TODO color replacement
in.require( START_ELEMENT, null, "ExternalGraphic" );
String format = null;
BufferedImage img = null;
String url = null;
Triple<InputStream, String, Continuation<StringBuffer>> pair = null;
Continuation<List<BufferedImage>> contn = null; // needs to be list to be updateable by reference...
while ( !( in.isEndElement() && in.getLocalName().equals( "ExternalGraphic" ) ) ) {
in.nextTag();
if ( in.getLocalName().equals( "Format" ) ) {
format = in.getElementText();
} else if ( in.getLocalName().equals( "OnlineResource" ) || in.getLocalName().equals( "InlineContent" ) ) {
pair = getOnlineResourceOrInlineContent( in );
} else if ( in.isStartElement() ) {
Location loc = in.getLocation();
LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
skipElement( in );
}
}
try {
if ( pair != null ) {
if ( pair.first != null && format != null && ( format.toLowerCase().indexOf( "svg" ) == -1 ) ) {
img = ImageIO.read( pair.first );
}
url = pair.second;
final Continuation<StringBuffer> sbcontn = pair.third;
if ( pair.third != null ) {
final LinkedHashMap<String, BufferedImage> cache = new LinkedHashMap<String, BufferedImage>( 256 ) {
private static final long serialVersionUID = -6847956873232942891L;
@Override
protected boolean removeEldestEntry( Map.Entry<String, BufferedImage> eldest ) {
return size() > 256; // yeah, hardcoded max size... TODO
}
};
contn = new Continuation<List<BufferedImage>>() {
@Override
public void updateStep( List<BufferedImage> base, Feature f, XPathEvaluator<Feature> evaluator ) {
StringBuffer sb = new StringBuffer();
sbcontn.evaluate( sb, f, evaluator );
String file = sb.toString();
if ( cache.containsKey( file ) ) {
base.add( cache.get( file ) );
return;
}
try {
BufferedImage i = ImageIO.read( resolve( file, in ) );
base.add( i );
cache.put( file, i );
} catch ( MalformedURLException e ) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch ( IOException e ) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
};
}
}
} finally {
if ( pair != null ) {
try {
pair.first.close();
} catch ( Exception e ) {
LOG.trace( "Stack trace when closing input stream:", e );
}
}
}
return new Triple<BufferedImage, String, Continuation<List<BufferedImage>>>( img, url, contn );
}
    /**
     * Parses an SE/SLD 'Graphic' element (mark or external graphic, plus opacity, size, rotation, anchor
     * point and displacement) into a Graphic base object and an optional Continuation for the
     * expression-based parts.
     */
    private Pair<Graphic, Continuation<Graphic>> parseGraphic( XMLStreamReader in )
                            throws XMLStreamException {
        in.require( START_ELEMENT, null, "Graphic" );
        Graphic base = new Graphic();
        Continuation<Graphic> contn = null;
        while ( !( in.isEndElement() && in.getLocalName().equals( "Graphic" ) ) ) {
            in.nextTag();
            if ( in.getLocalName().equals( "Mark" ) ) {
                final Pair<Mark, Continuation<Mark>> pair = parseMark( in );
                if ( pair != null ) {
                    base.mark = pair.first;
                    if ( pair.second != null ) {
                        contn = new Continuation<Graphic>( contn ) {
                            @Override
                            public void updateStep( Graphic base, Feature f, XPathEvaluator<Feature> evaluator ) {
                                pair.second.evaluate( base.mark, f, evaluator );
                            }
                        };
                    }
                }
            } else if ( in.getLocalName().equals( "ExternalGraphic" ) ) {
                try {
                    final Triple<BufferedImage, String, Continuation<List<BufferedImage>>> p = parseExternalGraphic( in );
                    if ( p.third != null ) {
                        // dynamic resource: the image is loaded per feature through the continuation
                        contn = new Continuation<Graphic>( contn ) {
                            @Override
                            public void updateStep( Graphic base, Feature f, XPathEvaluator<Feature> evaluator ) {
                                LinkedList<BufferedImage> list = new LinkedList<BufferedImage>();
                                p.third.evaluate( list, f, evaluator );
                                base.image = list.poll();
                            }
                        };
                    } else {
                        base.image = p.first;
                        base.imageURL = p.second;
                    }
                } catch ( IOException e ) {
                    LOG.debug( "Stack trace", e );
                    LOG.warn( get( "R2D.EXTERNAL_GRAPHIC_NOT_LOADED" ),
                              new Object[] { in.getLocation().getLineNumber(), in.getLocation().getColumnNumber(),
                                            in.getLocation().getSystemId() } );
                }
            } else if ( in.getLocalName().equals( "Opacity" ) ) {
                contn = updateOrContinue( in, "Opacity", base, new Updater<Graphic>() {
                    public void update( Graphic obj, String val ) {
                        obj.opacity = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( in.getLocalName().equals( "Size" ) ) {
                contn = updateOrContinue( in, "Size", base, new Updater<Graphic>() {
                    public void update( Graphic obj, String val ) {
                        obj.size = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( in.getLocalName().equals( "Rotation" ) ) {
                contn = updateOrContinue( in, "Rotation", base, new Updater<Graphic>() {
                    public void update( Graphic obj, String val ) {
                        obj.rotation = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( in.getLocalName().equals( "AnchorPoint" ) ) {
                while ( !( in.isEndElement() && in.getLocalName().equals( "AnchorPoint" ) ) ) {
                    in.nextTag();
                    if ( in.getLocalName().equals( "AnchorPointX" ) ) {
                        contn = updateOrContinue( in, "AnchorPointX", base, new Updater<Graphic>() {
                            public void update( Graphic obj, String val ) {
                                obj.anchorPointX = Double.parseDouble( val );
                            }
                        }, contn ).second;
                    } else if ( in.getLocalName().equals( "AnchorPointY" ) ) {
                        contn = updateOrContinue( in, "AnchorPointY", base, new Updater<Graphic>() {
                            public void update( Graphic obj, String val ) {
                                obj.anchorPointY = Double.parseDouble( val );
                            }
                        }, contn ).second;
                    } else if ( in.isStartElement() ) {
                        Location loc = in.getLocation();
                        LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                   new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                        skipElement( in );
                    }
                }
            } else if ( in.getLocalName().equals( "Displacement" ) ) {
                while ( !( in.isEndElement() && in.getLocalName().equals( "Displacement" ) ) ) {
                    in.nextTag();
                    if ( in.getLocalName().equals( "DisplacementX" ) ) {
                        contn = updateOrContinue( in, "DisplacementX", base, new Updater<Graphic>() {
                            public void update( Graphic obj, String val ) {
                                obj.displacementX = Double.parseDouble( val );
                            }
                        }, contn ).second;
                    } else if ( in.getLocalName().equals( "DisplacementY" ) ) {
                        contn = updateOrContinue( in, "DisplacementY", base, new Updater<Graphic>() {
                            public void update( Graphic obj, String val ) {
                                obj.displacementY = Double.parseDouble( val );
                            }
                        }, contn ).second;
                    } else if ( in.isStartElement() ) {
                        Location loc = in.getLocation();
                        LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                   new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                        skipElement( in );
                    }
                }
            } else if ( in.isStartElement() ) {
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
        }
        in.require( END_ELEMENT, null, "Graphic" );
        return new Pair<Graphic, Continuation<Graphic>>( base, contn );
    }
    /**
     * Parses a 'PointSymbolizer' element.
     *
     * @param in
     *            reader positioned at the PointSymbolizer start element
     * @param uom
     *            unit of measure for the resulting styling
     * @return a new symbolizer
     * @throws XMLStreamException
     */
    public Symbolizer<PointStyling> parsePointSymbolizer( XMLStreamReader in, UOM uom )
                            throws XMLStreamException {
        in.require( START_ELEMENT, null, "PointSymbolizer" );
        Common common = new Common( in.getLocation() );
        PointStyling baseOrEvaluated = new PointStyling();
        baseOrEvaluated.uom = uom;
        while ( !( in.isEndElement() && in.getLocalName().equals( "PointSymbolizer" ) ) ) {
            in.nextTag();
            checkCommon( common, in );
            if ( in.getLocalName().equals( "Graphic" ) ) {
                final Pair<Graphic, Continuation<Graphic>> pair = parseGraphic( in );
                if ( pair == null ) {
                    return new Symbolizer<PointStyling>( baseOrEvaluated, common.geometry, common.name, common.loc,
                                                         common.line, common.col );
                }
                baseOrEvaluated.graphic = pair.first;
                if ( pair.second != null ) {
                    // NOTE(review): common.name is passed as null here, unlike the static variants below --
                    // confirm dropping the symbolizer name is intended for the dynamic case.
                    return new Symbolizer<PointStyling>( baseOrEvaluated, new Continuation<PointStyling>() {
                        @Override
                        public void updateStep( PointStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            pair.second.evaluate( base.graphic, f, evaluator );
                        }
                    }, common.geometry, null, common.loc, common.line, common.col );
                }
            } else if ( in.isStartElement() ) {
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
        }
        in.require( END_ELEMENT, null, "PointSymbolizer" );
        return new Symbolizer<PointStyling>( baseOrEvaluated, common.geometry, common.name, common.loc, common.line,
                                             common.col );
    }
static UOM getUOM( String uom ) {
if ( uom != null ) {
String u = uom.toLowerCase();
if ( u.endsWith( "metre" ) || u.endsWith( "meter" ) ) {
return Metre;
} else if ( u.endsWith( "mm" ) ) {
return mm;
} else if ( u.endsWith( "foot" ) ) {
return Foot;
} else if ( !u.endsWith( "pixel" ) ) {
LOG.warn( "Unknown unit of measure '{}', using pixel instead.", uom );
}
}
return Pixel;
}
/**
* @param in
* @return the symbolizer
* @throws XMLStreamException
*/
public Triple<Symbolizer<?>, Continuation<StringBuffer>, String> parseSymbolizer( XMLStreamReader in )
throws XMLStreamException {
in.require( START_ELEMENT, null, null );
if ( in.getLocalName().endsWith( "Symbolizer" ) ) {
UOM uom = getUOM( in.getAttributeValue( null, "uom" ) );
if ( in.getLocalName().equals( "PointSymbolizer" ) ) {
return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>( parsePointSymbolizer( in, uom ),
null, null );
}
if ( in.getLocalName().equals( "LineSymbolizer" ) ) {
return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>( parseLineSymbolizer( in, uom ),
null, null );
}
if ( in.getLocalName().equals( "PolygonSymbolizer" ) ) {
return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>(
parsePolygonSymbolizer( in, uom ),
null, null );
}
if ( in.getLocalName().equals( "RasterSymbolizer" ) ) {
return new Triple<Symbolizer<?>, Continuation<StringBuffer>, String>( parseRasterSymbolizer( in, uom ),
null, null );
}
if ( in.getLocalName().equals( "TextSymbolizer" ) ) {
return (Triple) parseTextSymbolizer( in, uom );
}
Location loc = in.getLocation();
LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
skipElement( in );
}
return null;
}
    /**
     * Parses a 'RasterSymbolizer' element (opacity, channel selection, overlap behavior, color map, contrast
     * enhancement, shaded relief and image outline). Handles both the SE 1.1.0 and SLD 1.0.0 variants of
     * OverlapBehavior and ColorMap, distinguished by namespace.
     *
     * @param in
     *            reader positioned at the RasterSymbolizer start element
     * @param uom
     *            unit of measure for the resulting styling
     * @return the symbolizer
     * @throws XMLStreamException
     */
    public Symbolizer<RasterStyling> parseRasterSymbolizer( XMLStreamReader in, UOM uom )
                            throws XMLStreamException {
        in.require( START_ELEMENT, null, "RasterSymbolizer" );
        Common common = new Common( in.getLocation() );
        RasterStyling baseOrEvaluated = new RasterStyling();
        baseOrEvaluated.uom = uom;
        Continuation<RasterStyling> contn = null;
        while ( !( in.isEndElement() && in.getLocalName().equals( "RasterSymbolizer" ) ) ) {
            in.nextTag();
            checkCommon( common, in );
            if ( in.getLocalName().equals( "Opacity" ) ) {
                contn = updateOrContinue( in, "Opacity", baseOrEvaluated, new Updater<RasterStyling>() {
                    @Override
                    public void update( RasterStyling obj, String val ) {
                        obj.opacity = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( in.getLocalName().equals( "ChannelSelection" ) ) {
                // either red/green/blue channels or a single gray channel, each with an optional enhancement
                String red = null, green = null, blue = null, gray = null;
                HashMap<String, ContrastEnhancement> enhancements = new HashMap<String, ContrastEnhancement>( 10 );
                while ( !( in.isEndElement() && in.getLocalName().equals( "ChannelSelection" ) ) ) {
                    in.nextTag();
                    if ( in.getLocalName().equals( "RedChannel" ) ) {
                        in.nextTag();
                        in.require( START_ELEMENT, null, "SourceChannelName" );
                        red = in.getElementText();
                        in.nextTag();
                        ContrastEnhancement enh = parseContrastEnhancement( in );
                        if ( enh != null ) {
                            enhancements.put( "red", enh );
                        }
                        in.nextTag();
                    } else if ( in.getLocalName().equals( "GreenChannel" ) ) {
                        in.nextTag();
                        in.require( START_ELEMENT, null, "SourceChannelName" );
                        green = in.getElementText();
                        in.nextTag();
                        ContrastEnhancement enh = parseContrastEnhancement( in );
                        if ( enh != null ) {
                            enhancements.put( "green", enh );
                        }
                        in.nextTag();
                    } else if ( in.getLocalName().equals( "BlueChannel" ) ) {
                        in.nextTag();
                        in.require( START_ELEMENT, null, "SourceChannelName" );
                        blue = in.getElementText();
                        in.nextTag();
                        ContrastEnhancement enh = parseContrastEnhancement( in );
                        if ( enh != null ) {
                            enhancements.put( "blue", enh );
                        }
                        in.nextTag();
                    } else if ( in.getLocalName().equals( "GrayChannel" ) ) {
                        in.nextTag();
                        in.require( START_ELEMENT, null, "SourceChannelName" );
                        gray = in.getElementText();
                        in.nextTag();
                        ContrastEnhancement enh = parseContrastEnhancement( in );
                        if ( enh != null ) {
                            enhancements.put( "gray", enh );
                        }
                        in.nextTag();
                    } else if ( in.isStartElement() ) {
                        Location loc = in.getLocation();
                        LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                   new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                        skipElement( in );
                    }
                }
                baseOrEvaluated.channelSelection = new RasterChannelSelection( red, green, blue, gray, enhancements );
            } else if ( in.getLocalName().equals( "OverlapBehavior" ) ) {
                // actual difference between SLD 1.0.0/SE 1.1.0
                if ( in.getNamespaceURI().equals( SENS ) ) {
                    baseOrEvaluated.overlap = Overlap.valueOf( in.getElementText() );
                } else {
                    // SLD 1.0.0 encodes the overlap value as an empty child element
                    in.nextTag();
                    baseOrEvaluated.overlap = Overlap.valueOf( in.getLocalName() );
                    in.nextTag();
                    in.nextTag();
                }
            } else if ( in.getLocalName().equals( "ColorMap" ) ) {
                if ( in.getNamespaceURI().equals( SENS ) ) {
                    in.nextTag();
                    if ( in.getLocalName().equals( "Categorize" ) ) {
                        baseOrEvaluated.categorize = new Categorize().parse( in );
                    } else if ( in.getLocalName().equals( "Interpolate" ) ) {
                        baseOrEvaluated.interpolate = new Interpolate().parse( in );
                    } else if ( in.isStartElement() ) {
                        Location loc = in.getLocation();
                        LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                   new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                        skipElement( in );
                    }
                    in.nextTag();
                } else {
                    baseOrEvaluated.interpolate = Interpolate.parseSLD100( in );
                }
            } else if ( in.getLocalName().equals( "ContrastEnhancement" ) ) {
                baseOrEvaluated.contrastEnhancement = parseContrastEnhancement( in );
            } else if ( in.getLocalName().equals( "ShadedRelief" ) ) {
                baseOrEvaluated.shaded = new ShadedRelief();
                while ( !( in.isEndElement() && in.getLocalName().equals( "ShadedRelief" ) ) ) {
                    in.nextTag();
                    if ( in.getLocalName().equals( "BrightnessOnly" ) ) {
                        baseOrEvaluated.shaded.brightnessOnly = getElementTextAsBoolean( in );
                    }
                    if ( in.getLocalName().equals( "ReliefFactor" ) ) {
                        baseOrEvaluated.shaded.reliefFactor = parseDouble( in.getElementText() );
                    }
                    // NOTE(review): AzimuthAngle/IlluminationAngle are not part of standard SE ShadedRelief
                    // content -- presumably a deegree extension; unknown children are silently ignored here.
                    if ( in.getLocalName().equals( "AzimuthAngle" ) ) {
                        baseOrEvaluated.shaded.azimuthAngle = parseDouble( in.getElementText() );
                    }
                    if ( in.getLocalName().equals( "IlluminationAngle" ) ) {
                        baseOrEvaluated.shaded.alt = parseDouble( in.getElementText() );
                    }
                }
            } else if ( in.getLocalName().equals( "ImageOutline" ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "LineSymbolizer" ) ) {
                    baseOrEvaluated.imageOutline = parseLineSymbolizer( in,
                                                                        getUOM( in.getAttributeValue( null, "uom" ) ) );
                }
                if ( in.getLocalName().equals( "PolygonSymbolizer" ) ) {
                    baseOrEvaluated.imageOutline = parsePolygonSymbolizer( in, getUOM( in.getAttributeValue( null,
                                                                                                             "uom" ) ) );
                }
                in.nextTag();
            } else if ( in.isStartElement() ) {
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
        }
        in.require( END_ELEMENT, null, "RasterSymbolizer" );
        return new Symbolizer<RasterStyling>( baseOrEvaluated, contn, common.geometry, common.name, common.loc,
                                              common.line, common.col );
    }
    /**
     * Parses a 'ContrastEnhancement' element (Normalize, Histogram, GammaValue), or returns null if the
     * reader is not positioned at one.
     */
    private ContrastEnhancement parseContrastEnhancement( XMLStreamReader in )
                            throws XMLStreamException {
        if ( !in.getLocalName().equals( "ContrastEnhancement" ) ) {
            return null;
        }
        ContrastEnhancement base = new ContrastEnhancement();
        while ( !( in.isEndElement() && in.getLocalName().equals( "ContrastEnhancement" ) ) ) {
            in.nextTag();
            if ( in.getLocalName().equals( "Normalize" ) ) {
                // Normalize is an empty element; step over it to its end tag
                in.nextTag();
                base.normalize = true;
            } else if ( in.getLocalName().equals( "Histogram" ) ) {
                base.histogram = true;
            } else if ( in.getLocalName().equals( "GammaValue" ) ) {
                base.gamma = parseDouble( in.getElementText() );
            } else if ( in.isStartElement() ) {
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
        }
        return base;
    }
    /**
     * Parses a 'LineSymbolizer' element (stroke and perpendicular offset).
     *
     * @param in
     *            reader positioned at the LineSymbolizer start element
     * @param uom
     *            unit of measure for the resulting styling
     * @return the symbolizer
     * @throws XMLStreamException
     */
    public Symbolizer<LineStyling> parseLineSymbolizer( XMLStreamReader in, UOM uom )
                            throws XMLStreamException {
        in.require( START_ELEMENT, null, "LineSymbolizer" );
        Common common = new Common( in.getLocation() );
        LineStyling baseOrEvaluated = new LineStyling();
        baseOrEvaluated.uom = uom;
        Continuation<LineStyling> contn = null;
        while ( !( in.isEndElement() && in.getLocalName().equals( "LineSymbolizer" ) ) ) {
            in.nextTag();
            checkCommon( common, in );
            if ( in.getLocalName().equals( "Stroke" ) ) {
                final Pair<Stroke, Continuation<Stroke>> pair = parseStroke( in );
                if ( pair != null ) {
                    baseOrEvaluated.stroke = pair.first;
                    if ( pair.second != null ) {
                        contn = new Continuation<LineStyling>( contn ) {
                            @Override
                            public void updateStep( LineStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                                pair.second.evaluate( base.stroke, f, evaluator );
                            }
                        };
                    }
                }
            } else if ( in.getLocalName().equals( "PerpendicularOffset" ) ) {
                baseOrEvaluated.perpendicularOffsetType = getPerpendicularOffsetType( in );
                contn = updateOrContinue( in, "PerpendicularOffset", baseOrEvaluated, new Updater<LineStyling>() {
                    @Override
                    public void update( LineStyling obj, String val ) {
                        obj.perpendicularOffset = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( in.isStartElement() ) {
                Location loc = in.getLocation();
                LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                           new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                skipElement( in );
            }
        }
        // return the static variant when no dynamic parts were found
        if ( contn == null ) {
            return new Symbolizer<LineStyling>( baseOrEvaluated, common.geometry, common.name, common.loc, common.line,
                                                common.col );
        }
        return new Symbolizer<LineStyling>( baseOrEvaluated, contn, common.geometry, common.name, common.loc,
                                            common.line, common.col );
    }
/**
 * Parses an SE &lt;PolygonSymbolizer&gt; element the reader is positioned on.
 *
 * @param in reader positioned on the PolygonSymbolizer start element
 * @param uom unit of measure to apply to the resulting styling
 * @return the symbolizer; carries a continuation only if any property is expression-valued
 * @throws XMLStreamException on malformed XML
 */
public Symbolizer<PolygonStyling> parsePolygonSymbolizer( XMLStreamReader in, UOM uom )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "PolygonSymbolizer" );
    Common common = new Common( in.getLocation() );
    PolygonStyling baseOrEvaluated = new PolygonStyling();
    baseOrEvaluated.uom = uom;
    // chain of per-feature updates; stays null when all values are static
    Continuation<PolygonStyling> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "PolygonSymbolizer" ) ) ) {
        in.nextTag();
        // picks up Name/Description/Geometry common to all symbolizers
        checkCommon( common, in );
        if ( in.getLocalName().equals( "Stroke" ) ) {
            final Pair<Stroke, Continuation<Stroke>> pair = parseStroke( in );
            if ( pair != null ) {
                baseOrEvaluated.stroke = pair.first;
                if ( pair.second != null ) {
                    // stroke has dynamic parts: defer their evaluation to render time
                    contn = new Continuation<PolygonStyling>( contn ) {
                        @Override
                        public void updateStep( PolygonStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            pair.second.evaluate( base.stroke, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "Fill" ) ) {
            final Pair<Fill, Continuation<Fill>> fillPair = parseFill( in );
            if ( fillPair != null ) {
                baseOrEvaluated.fill = fillPair.first;
                if ( fillPair.second != null ) {
                    contn = new Continuation<PolygonStyling>( contn ) {
                        @Override
                        public void updateStep( PolygonStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fillPair.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "PerpendicularOffset" ) ) {
            baseOrEvaluated.perpendicularOffsetType = getPerpendicularOffsetType( in );
            contn = updateOrContinue( in, "PerpendicularOffset", baseOrEvaluated, new Updater<PolygonStyling>() {
                @Override
                public void update( PolygonStyling obj, String val ) {
                    obj.perpendicularOffset = Double.parseDouble( val );
                }
            }, contn ).second;
        } else if ( in.getLocalName().equals( "Displacement" ) ) {
            // nested loop: Displacement wraps DisplacementX/DisplacementY children
            while ( !( in.isEndElement() && in.getLocalName().equals( "Displacement" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equals( "DisplacementX" ) ) {
                    contn = updateOrContinue( in, "DisplacementX", baseOrEvaluated, new Updater<PolygonStyling>() {
                        @Override
                        public void update( PolygonStyling obj, String val ) {
                            obj.displacementX = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.getLocalName().equals( "DisplacementY" ) ) {
                    contn = updateOrContinue( in, "DisplacementY", baseOrEvaluated, new Updater<PolygonStyling>() {
                        @Override
                        public void update( PolygonStyling obj, String val ) {
                            obj.displacementY = Double.parseDouble( val );
                        }
                    }, contn ).second;
                } else if ( in.isStartElement() ) {
                    Location loc = in.getLocation();
                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                               new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                    skipElement( in );
                }
            }
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }
    if ( contn == null ) {
        // fully static symbolizer
        return new Symbolizer<PolygonStyling>( baseOrEvaluated, common.geometry, common.name, common.loc,
                                               common.line, common.col );
    }
    return new Symbolizer<PolygonStyling>( baseOrEvaluated, contn, common.geometry, common.name, common.loc,
                                           common.line, common.col );
}
/**
 * Parses an element whose content mixes literal text with filter expressions, and either applies the
 * value immediately (all-literal content) via the given updater, or builds a {@link Continuation} that
 * re-evaluates the expressions per feature at render time.
 *
 * Matching is done with {@code endsWith(name)} — presumably so that both "SvgParameter" and
 * "CssParameter" style element names match; TODO confirm.
 *
 * @param <T> type of the styling object being updated
 * @param in reader; only consumed if the current element's local name ends with {@code name}
 * @param name suffix of the element local name to parse
 * @param obj object to update immediately when the content is purely literal
 * @param updater receives the final string value (immediately, or per feature via the continuation)
 * @param contn previous continuation in the chain, may be null
 * @return either contn, or a new continuation which updates obj, also the XML snippet (w/ filter expressions
 *         re-exported) which was parsed (or null, if none was parsed)
 * @throws XMLStreamException on malformed XML
 */
public <T> Pair<String, Continuation<T>> updateOrContinue( XMLStreamReader in, String name, T obj,
                                                           final Updater<T> updater, Continuation<T> contn )
                        throws XMLStreamException {
    // xmlText accumulates the raw snippet only when snippet collection is enabled
    StringBuilder xmlText = collectXMLSnippets ? new StringBuilder() : null;
    if ( in.getLocalName().endsWith( name ) ) {
        // each entry is either (literal text, null) or (null, (expression, location string for error reports))
        final LinkedList<Pair<String, Pair<Expression, String>>> text = new LinkedList<Pair<String, Pair<Expression, String>>>(); // no
        // real 'alternative', have we?
        boolean textOnly = true;
        while ( !( in.isEndElement() && in.getLocalName().endsWith( name ) ) ) {
            in.next();
            if ( in.isStartElement() ) {
                Expression expr = parseExpression( in );
                if ( collectXMLSnippets ) {
                    // re-export the parsed expression so the collected snippet stays well-formed XML
                    StringWriter sw = new StringWriter();
                    XMLStreamWriter out = XMLOutputFactory.newInstance().createXMLStreamWriter( sw );
                    Filter110XMLEncoder.export( expr, out );
                    xmlText.append( sw.toString() );
                }
                Pair<Expression, String> second;
                second = new Pair<Expression, String>( expr, get( "R2D.LINE", in.getLocation().getLineNumber(),
                                                                  in.getLocation().getColumnNumber(),
                                                                  in.getLocation().getSystemId() ) );
                text.add( new Pair<String, Pair<Expression, String>>( null, second ) );
                textOnly = false;
            }
            if ( in.isCharacters() ) {
                if ( collectXMLSnippets ) {
                    xmlText.append( in.getText() );
                }
                if ( textOnly && !text.isEmpty() ) { // concat text in case of multiple text nodes from
                    // beginning
                    String txt = text.removeLast().first;
                    text.add( new Pair<String, Pair<Expression, String>>( txt + in.getText().trim(), null ) );
                } else {
                    text.add( new Pair<String, Pair<Expression, String>>( in.getText().trim(), null ) );
                }
            }
        }
        in.require( END_ELEMENT, null, null );
        if ( textOnly ) {
            // static value: apply it right away, no continuation needed
            if ( text.isEmpty() ) {
                LOG.warn( "Expression was empty at line {}, column {}.", in.getLocation().getLineNumber(),
                          in.getLocation().getColumnNumber() );
            }
            updater.update( obj, text.isEmpty() ? "" : text.getFirst().first );
        } else {
            // dynamic value: concatenate literals and per-feature expression results at render time
            contn = new Continuation<T>( contn ) {
                @Override
                public void updateStep( T base, Feature f, XPathEvaluator<Feature> evaluator ) {
                    StringBuilder tmp = new StringBuilder();
                    for ( Pair<String, Pair<Expression, String>> p : text ) {
                        if ( p.first != null ) {
                            tmp.append( p.first );
                        }
                        if ( p.second != null ) {
                            try {
                                TypedObjectNode[] evald = p.second.first.evaluate( f, evaluator );
                                if ( evald.length == 0 ) {
                                    LOG.warn( get( "R2D.EXPRESSION_TO_NULL" ), p.second.second );
                                } else {
                                    // only the first evaluation result is used
                                    tmp.append( evald[0] );
                                }
                            } catch ( FilterEvaluationException e ) {
                                LOG.warn( get( "R2D.ERROR_EVAL" ), e.getLocalizedMessage(), p.second.second );
                            }
                        }
                    }
                    updater.update( base, tmp.toString() );
                }
            };
        }
    }
    return new Pair<String, Continuation<T>>( collectXMLSnippets ? xmlText.toString().trim() : null, contn );
}
/**
 * Parses an SE &lt;TextSymbolizer&gt; element the reader is positioned on.
 *
 * @param in reader positioned on the TextSymbolizer start element
 * @param uom unit of measure to apply to the resulting styling
 * @return triple of (symbolizer, label continuation evaluated per feature, raw label XML snippet or null)
 * @throws XMLStreamException on malformed XML
 */
public Triple<Symbolizer<TextStyling>, Continuation<StringBuffer>, String> parseTextSymbolizer( XMLStreamReader in,
                                                                                                UOM uom )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "TextSymbolizer" );
    Common common = new Common( in.getLocation() );
    TextStyling baseOrEvaluated = new TextStyling();
    baseOrEvaluated.uom = uom;
    // styling continuation (dynamic properties); label is tracked separately per feature
    Continuation<TextStyling> contn = null;
    Continuation<StringBuffer> label = null;
    String xmlText = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "TextSymbolizer" ) ) ) {
        in.nextTag();
        checkCommon( common, in );
        if ( in.getLocalName().equals( "Label" ) ) {
            Pair<String, Continuation<StringBuffer>> res = updateOrContinue( in, "Label", new StringBuffer(),
                                                                             new Updater<StringBuffer>() {
                                                                                 @Override
                                                                                 public void update(
                                                                                                     StringBuffer obj,
                                                                                                     String val ) {
                                                                                     obj.append( val );
                                                                                 }
                                                                             }, null );
            xmlText = res.first;
            label = res.second;
        } else if ( in.getLocalName().equals( "LabelPlacement" ) ) {
            // NOTE(review): the open-check uses equalsIgnoreCase("PointPlacement") but the inner loop's
            // end-check uses case-sensitive equals — a case-mismatched element name could loop past its
            // end tag; confirm whether case-insensitivity is intended here at all.
            while ( !( in.isEndElement() && in.getLocalName().equalsIgnoreCase( "LabelPlacement" ) ) ) {
                in.nextTag();
                if ( in.getLocalName().equalsIgnoreCase( "PointPlacement" ) ) {
                    while ( !( in.isEndElement() && in.getLocalName().equals( "PointPlacement" ) ) ) {
                        in.nextTag();
                        if ( in.getLocalName().equals( "AnchorPoint" ) ) {
                            while ( !( in.isEndElement() && in.getLocalName().equals( "AnchorPoint" ) ) ) {
                                in.nextTag();
                                if ( in.getLocalName().equals( "AnchorPointX" ) ) {
                                    contn = updateOrContinue( in, "AnchorPointX", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.anchorPointX = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.getLocalName().equals( "AnchorPointY" ) ) {
                                    contn = updateOrContinue( in, "AnchorPointY", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.anchorPointY = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.isStartElement() ) {
                                    Location loc = in.getLocation();
                                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                               new Object[] { in.getLocalName(), loc.getLineNumber(),
                                                             loc.getColumnNumber() } );
                                    skipElement( in );
                                }
                            }
                        } else if ( in.getLocalName().equals( "Displacement" ) ) {
                            while ( !( in.isEndElement() && in.getLocalName().equals( "Displacement" ) ) ) {
                                in.nextTag();
                                if ( in.getLocalName().equals( "DisplacementX" ) ) {
                                    contn = updateOrContinue( in, "DisplacementX", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.displacementX = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.getLocalName().equals( "DisplacementY" ) ) {
                                    contn = updateOrContinue( in, "DisplacementY", baseOrEvaluated,
                                                              new Updater<TextStyling>() {
                                                                  @Override
                                                                  public void update( TextStyling obj, String val ) {
                                                                      obj.displacementY = Double.parseDouble( val );
                                                                  }
                                                              }, contn ).second;
                                } else if ( in.isStartElement() ) {
                                    Location loc = in.getLocation();
                                    LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                                               new Object[] { in.getLocalName(), loc.getLineNumber(),
                                                             loc.getColumnNumber() } );
                                    skipElement( in );
                                }
                            }
                        } else if ( in.getLocalName().equals( "Rotation" ) ) {
                            contn = updateOrContinue( in, "Rotation", baseOrEvaluated, new Updater<TextStyling>() {
                                @Override
                                public void update( TextStyling obj, String val ) {
                                    obj.rotation = Double.parseDouble( val );
                                }
                            }, contn ).second;
                        } else if ( in.isStartElement() ) {
                            Location loc = in.getLocation();
                            LOG.error(
                                       "Found unknown element '{}' at line {}, column {}, skipping.",
                                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
                            skipElement( in );
                        }
                    }
                }
                // LinePlacement is an alternative to PointPlacement inside LabelPlacement
                if ( in.getLocalName().equals( "LinePlacement" ) ) {
                    final Pair<LinePlacement, Continuation<LinePlacement>> pair = parseLinePlacement( in );
                    if ( pair != null ) {
                        baseOrEvaluated.linePlacement = pair.first;
                        if ( pair.second != null ) {
                            contn = new Continuation<TextStyling>( contn ) {
                                @Override
                                public void updateStep( TextStyling base, Feature f,
                                                        XPathEvaluator<Feature> evaluator ) {
                                    pair.second.evaluate( base.linePlacement, f, evaluator );
                                }
                            };
                        }
                    }
                }
            }
        } else if ( in.getLocalName().equals( "Halo" ) ) {
            final Pair<Halo, Continuation<Halo>> haloPair = parseHalo( in );
            if ( haloPair != null ) {
                baseOrEvaluated.halo = haloPair.first;
                if ( haloPair.second != null ) {
                    contn = new Continuation<TextStyling>( contn ) {
                        @Override
                        public void updateStep( TextStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            haloPair.second.evaluate( base.halo, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "Font" ) ) {
            final Pair<Font, Continuation<Font>> fontPair = parseFont( in );
            if ( fontPair != null ) {
                baseOrEvaluated.font = fontPair.first;
                if ( fontPair.second != null ) {
                    contn = new Continuation<TextStyling>( contn ) {
                        @Override
                        public void updateStep( TextStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fontPair.second.evaluate( base.font, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.getLocalName().equals( "Fill" ) ) {
            final Pair<Fill, Continuation<Fill>> fillPair = parseFill( in );
            if ( fillPair != null ) {
                baseOrEvaluated.fill = fillPair.first;
                if ( fillPair.second != null ) {
                    contn = new Continuation<TextStyling>( contn ) {
                        @Override
                        public void updateStep( TextStyling base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fillPair.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            }
        } else if ( in.isStartElement() ) {
            Location loc = in.getLocation();
            LOG.error( "Found unknown element '{}' at line {}, column {}, skipping.",
                       new Object[] { in.getLocalName(), loc.getLineNumber(), loc.getColumnNumber() } );
            skipElement( in );
        }
    }
    if ( contn == null ) {
        Symbolizer<TextStyling> sym = new Symbolizer<TextStyling>( baseOrEvaluated, common.geometry, common.name,
                                                                   common.loc, common.line, common.col );
        return new Triple<Symbolizer<TextStyling>, Continuation<StringBuffer>, String>( sym, label, xmlText );
    }
    Symbolizer<TextStyling> sym = new Symbolizer<TextStyling>( baseOrEvaluated, contn, common.geometry,
                                                               common.name, common.loc, common.line, common.col );
    return new Triple<Symbolizer<TextStyling>, Continuation<StringBuffer>, String>( sym, label, xmlText );
}
/**
 * Parses an SE &lt;Font&gt; element the reader is positioned on. Recognised Svg/CssParameter names are
 * font-family, font-style, font-weight and font-size; the deprecated non-standard font-color and any
 * other names are skipped with a warning.
 *
 * @param in reader positioned on the Font start element
 * @return pair of (parsed font, continuation for expression-valued parameters or null)
 * @throws XMLStreamException on malformed XML
 */
private Pair<Font, Continuation<Font>> parseFont( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Font" );
    Font baseOrEvaluated = new Font();
    Continuation<Font> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "Font" ) ) ) {
        in.nextTag();
        // endsWith matches both SvgParameter and CssParameter element names
        if ( in.getLocalName().endsWith( "Parameter" ) ) {
            String name = in.getAttributeValue( null, "name" );
            if ( name == null ) {
                // a parameter without a 'name' attribute used to cause a NullPointerException below
                skipElement( in );
                LOG.warn( "Skipping Svg/CssParameter without a 'name' attribute." );
            } else if ( name.equals( "font-family" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        obj.fontFamily.add( val );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-style" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        obj.fontStyle = Style.valueOf( val.toUpperCase() );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-weight" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        // SE only distinguishes bold vs. normal
                        obj.bold = val.equalsIgnoreCase( "bold" );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-size" ) ) {
                contn = updateOrContinue( in, "Parameter", baseOrEvaluated, new Updater<Font>() {
                    @Override
                    public void update( Font obj, String val ) {
                        obj.fontSize = Double.parseDouble( val );
                    }
                }, contn ).second;
            } else if ( name.equals( "font-color" ) ) {
                skipElement( in );
                LOG.warn( "The non-standard font-color Svg/CssParameter is not supported any more. Use a standard Fill element instead." );
            } else {
                in.getElementText();
                LOG.warn( "The non-standard '{}' Svg/CssParameter is not supported.", name );
            }
        }
        // NOTE(review): unlike sibling parse methods, unknown non-Parameter child elements are not
        // skipped here — the loop simply steps through them; confirm whether that is intended.
    }
    return new Pair<Font, Continuation<Font>>( baseOrEvaluated, contn );
}
/**
 * Parses an SE &lt;Halo&gt; element the reader is positioned on.
 *
 * @param in reader positioned on the Halo start element
 * @return pair of (parsed halo, continuation for expression-valued radius/fill or null)
 * @throws XMLStreamException on malformed XML
 */
private Pair<Halo, Continuation<Halo>> parseHalo( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "Halo" );
    Halo baseOrEvaluated = new Halo();
    Continuation<Halo> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "Halo" ) ) ) {
        in.nextTag();
        if ( in.getLocalName().equals( "Radius" ) ) {
            contn = updateOrContinue( in, "Radius", baseOrEvaluated, new Updater<Halo>() {
                @Override
                public void update( Halo obj, String val ) {
                    obj.radius = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "Fill" ) ) {
            final Pair<Fill, Continuation<Fill>> fillPair = parseFill( in );
            if ( fillPair != null ) {
                baseOrEvaluated.fill = fillPair.first;
                if ( fillPair.second != null ) {
                    // fill has dynamic parts: defer their evaluation to render time
                    contn = new Continuation<Halo>( contn ) {
                        @Override
                        public void updateStep( Halo base, Feature f, XPathEvaluator<Feature> evaluator ) {
                            fillPair.second.evaluate( base.fill, f, evaluator );
                        }
                    };
                }
            }
        }
        // NOTE(review): unknown child elements are not logged/skipped here, unlike sibling methods.
    }
    return new Pair<Halo, Continuation<Halo>>( baseOrEvaluated, contn );
}
/**
 * Reads the optional 'type' and 'substraction' attributes off the current element and converts them
 * into a {@link PerpendicularOffsetType}. Unrecognised values are logged and left at their defaults.
 *
 * @param in reader positioned on the element carrying the attributes
 * @return a perpendicular offset type, never null
 */
private static PerpendicularOffsetType getPerpendicularOffsetType( XMLStreamReader in ) {
    PerpendicularOffsetType result = new PerpendicularOffsetType();
    final String typeAttr = in.getAttributeValue( null, "type" );
    final String substractionAttr = in.getAttributeValue( null, "substraction" );
    if ( typeAttr != null ) {
        try {
            result.type = Type.valueOf( typeAttr );
        } catch ( IllegalArgumentException e ) {
            LOG.debug( "Stack trace:", e );
            LOG.warn( "The value '{}' is not a valid type for perpendicular offsets. Valid types are: {}",
                      typeAttr, Arrays.toString( Type.values() ) );
        }
    }
    if ( substractionAttr != null ) {
        try {
            result.substraction = Substraction.valueOf( substractionAttr );
        } catch ( IllegalArgumentException e ) {
            LOG.debug( "Stack trace:", e );
            LOG.warn( "The value '{}' is not a valid substraction type for perpendicular offsets."
                      + " Valid types are: {}", substractionAttr, Arrays.toString( Substraction.values() ) );
        }
    }
    return result;
}
/**
 * Parses an SE &lt;LinePlacement&gt; element the reader is positioned on. Handles PerpendicularOffset,
 * InitialGap, Gap, GeneralizeLine, IsAligned and IsRepeated children.
 *
 * @param in reader positioned on the LinePlacement start element
 * @return pair of (parsed placement, continuation for expression-valued properties or null)
 * @throws XMLStreamException on malformed XML
 */
private Pair<LinePlacement, Continuation<LinePlacement>> parseLinePlacement( XMLStreamReader in )
                        throws XMLStreamException {
    in.require( START_ELEMENT, null, "LinePlacement" );
    LinePlacement baseOrEvaluated = new LinePlacement();
    Continuation<LinePlacement> contn = null;
    while ( !( in.isEndElement() && in.getLocalName().equals( "LinePlacement" ) ) ) {
        in.nextTag();
        if ( in.getLocalName().equals( "PerpendicularOffset" ) ) {
            // type/substraction come from attributes, the offset value from element content
            baseOrEvaluated.perpendicularOffsetType = getPerpendicularOffsetType( in );
            contn = updateOrContinue( in, "PerpendicularOffset", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.perpendicularOffset = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "InitialGap" ) ) {
            contn = updateOrContinue( in, "InitialGap", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.initialGap = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "Gap" ) ) {
            contn = updateOrContinue( in, "Gap", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.gap = Double.parseDouble( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "GeneralizeLine" ) ) {
            contn = updateOrContinue( in, "GeneralizeLine", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.generalizeLine = Boolean.parseBoolean( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "IsAligned" ) ) {
            contn = updateOrContinue( in, "IsAligned", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.isAligned = Boolean.parseBoolean( val );
                }
            }, contn ).second;
        }
        if ( in.getLocalName().equals( "IsRepeated" ) ) {
            contn = updateOrContinue( in, "IsRepeated", baseOrEvaluated, new Updater<LinePlacement>() {
                @Override
                public void update( LinePlacement obj, String val ) {
                    obj.repeat = Boolean.parseBoolean( val );
                }
            }, contn ).second;
        }
        // NOTE(review): unknown child elements are not logged/skipped here, unlike sibling methods.
    }
    return new Pair<LinePlacement, Continuation<LinePlacement>>( baseOrEvaluated, contn );
}
/**
 * Entry point: parses either a bare symbolizer document or a FeatureTypeStyle/CoverageStyle document.
 *
 * @param in reader positioned at the document start or on the root element
 * @return null, if no symbolizer and no Feature type style was found
 * @throws XMLStreamException on malformed XML
 */
public org.deegree.rendering.r2d.se.unevaluated.Style parse( XMLStreamReader in )
                        throws XMLStreamException {
    if ( in.getEventType() == START_DOCUMENT ) {
        // advance to the root element first
        in.nextTag();
    }
    final String rootName = in.getLocalName();
    if ( rootName.endsWith( "Symbolizer" ) ) {
        // a single symbolizer document becomes a one-symbolizer style
        Triple<Symbolizer<?>, Continuation<StringBuffer>, String> sym = parseSymbolizer( in );
        return new org.deegree.rendering.r2d.se.unevaluated.Style( sym.first, sym.second, sym.first.getName(),
                                                                   sym.third );
    }
    if ( rootName.equals( "FeatureTypeStyle" ) ) {
        return parseFeatureTypeOrCoverageStyle( in );
    }
    LOG.warn( "Symbology file '{}' did not contain symbolizer or feature type style.",
              in.getLocation().getSystemId() );
    return null;
}
/**
 * Parses an SE FeatureTypeStyle or CoverageStyle element, or follows an OnlineResource reference to a
 * remote style document. Each Rule is collected as a (filter continuation, scale range) pair.
 *
 * @param in reader positioned on the FeatureTypeStyle/CoverageStyle/OnlineResource start element
 * @return a new style
 * @throws XMLStreamException on malformed XML
 */
public org.deegree.rendering.r2d.se.unevaluated.Style parseFeatureTypeOrCoverageStyle( XMLStreamReader in )
                        throws XMLStreamException {
    if ( in.getLocalName().equals( "OnlineResource" ) ) {
        // indirection: the whole style lives in a remote document
        try {
            URL url = SymbologyParser.parseOnlineResource( in );
            XMLStreamReader newReader = XMLInputFactory.newInstance().createXMLStreamReader( url.toString(),
                                                                                             url.openStream() );
            return parseFeatureTypeOrCoverageStyle( newReader );
        } catch ( MalformedURLException e ) {
            LOG.warn( "An URL referencing a FeatureType or CoverageStyle could not be resolved." );
            LOG.debug( "Stack trace:", e );
        } catch ( FactoryConfigurationError e ) {
            LOG.warn( "An URL referencing a FeatureType or CoverageStyle could not be read." );
            LOG.debug( "Stack trace:", e );
        } catch ( IOException e ) {
            LOG.warn( "An URL referencing a FeatureType or CoverageStyle could not be read." );
            LOG.debug( "Stack trace:", e );
        }
        // NOTE(review): on failure we fall through and keep parsing the local OnlineResource element as
        // if it were a style — presumably yielding an empty style; confirm this is the intended fallback.
    }
    // one entry per Rule: (rule continuation, min/max scale denominators)
    LinkedList<Pair<Continuation<LinkedList<Symbolizer<?>>>, DoublePair>> result = new LinkedList<Pair<Continuation<LinkedList<Symbolizer<?>>>, DoublePair>>();
    HashMap<Symbolizer<TextStyling>, Continuation<StringBuffer>> labels = new HashMap<Symbolizer<TextStyling>, Continuation<StringBuffer>>();
    HashMap<Symbolizer<TextStyling>, String> labelXMLTexts = collectXMLSnippets ? new HashMap<Symbolizer<TextStyling>, String>()
                                                                               : null;
    Common common = new Common( in.getLocation() );
    QName featureTypeName = null;
    while ( !( in.isEndElement() && ( in.getLocalName().equals( "FeatureTypeStyle" ) || in.getLocalName().equals(
                                                                                                                  "CoverageStyle" ) ) ) ) {
        in.nextTag();
        checkCommon( common, in );
        // TODO unused
        if ( in.getLocalName().equals( "SemanticTypeIdentifier" ) ) {
            in.getElementText(); // AndThrowItAwayImmediately
        }
        if ( in.getLocalName().equals( "FeatureTypeName" ) ) {
            featureTypeName = getElementTextAsQName( in );
        }
        // TODO unused
        if ( in.getLocalName().equals( "CoverageName" ) ) {
            in.getElementText(); // AndThrowItAwayImmediately
        }
        if ( in.getLocalName().equals( "Rule" ) || in.getLocalName().equals( "OnlineResource" ) ) {
            XMLStreamReader localReader = in;
            if ( in.getLocalName().equals( "OnlineResource" ) ) {
                // a Rule may itself be a reference to a remote document
                try {
                    URL url = parseOnlineResource( in );
                    localReader = XMLInputFactory.newInstance().createXMLStreamReader( url.toString(),
                                                                                       url.openStream() );
                } catch ( IOException e ) {
                    // NOTE(review): on failure localReader stays on the local OnlineResource element, so
                    // the Rule loop below scans for a </Rule> that may never come — confirm.
                    LOG.warn( "Error '{}' while resolving/accessing remote Rule document.", e.getLocalizedMessage() );
                    LOG.debug( "Stack trace:", e );
                }
            }
            Common ruleCommon = new Common( in.getLocation() );
            // default scale range is unbounded
            double minScale = NEGATIVE_INFINITY;
            double maxScale = POSITIVE_INFINITY;
            Filter filter = null;
            LinkedList<Symbolizer<?>> syms = new LinkedList<Symbolizer<?>>();
            while ( !( localReader.isEndElement() && localReader.getLocalName().equals( "Rule" ) ) ) {
                localReader.nextTag();
                checkCommon( ruleCommon, localReader );
                if ( localReader.getLocalName().equals( "Filter" ) ) {
                    filter = Filter110XMLDecoder.parse( localReader );
                }
                if ( localReader.getLocalName().equals( "ElseFilter" ) ) {
                    // marker singleton; matched by identity in FilterContinuation
                    filter = ELSEFILTER;
                    localReader.nextTag();
                }
                if ( localReader.getLocalName().equals( "MinScaleDenominator" ) ) {
                    minScale = parseDouble( localReader.getElementText() );
                }
                if ( localReader.getLocalName().equals( "MaxScaleDenominator" ) ) {
                    maxScale = parseDouble( localReader.getElementText() );
                }
                // TODO legendgraphic
                if ( localReader.isStartElement() && localReader.getLocalName().endsWith( "Symbolizer" ) ) {
                    Triple<Symbolizer<?>, Continuation<StringBuffer>, String> parsedSym = parseSymbolizer( localReader );
                    if ( parsedSym.second != null ) {
                        // remember label continuations of text symbolizers separately
                        labels.put( (Symbolizer) parsedSym.first, parsedSym.second );
                    }
                    if ( collectXMLSnippets && parsedSym.third != null ) {
                        labelXMLTexts.put( (Symbolizer) parsedSym.first, parsedSym.third );
                    }
                    syms.add( parsedSym.first );
                }
            }
            FilterContinuation contn = new FilterContinuation( filter, syms, ruleCommon );
            DoublePair scales = new DoublePair( minScale, maxScale );
            result.add( new Pair<Continuation<LinkedList<Symbolizer<?>>>, DoublePair>( contn, scales ) );
        }
    }
    return new org.deegree.rendering.r2d.se.unevaluated.Style( result, labels, labelXMLTexts, common.name,
                                                               featureTypeName );
}
/**
 * Marker filter representing an SE/SLD ElseFilter. Normal evaluation never matches; rule selection
 * instead checks identity against the ELSEFILTER singleton (see FilterContinuation#updateStep).
 */
static class ElseFilter implements Filter {
    @Override
    public <T> boolean evaluate( T object, XPathEvaluator<T> evaluator )
                            throws FilterEvaluationException {
        return false; // always to false, has to be checked differently, see FilterContinuation below
    }

    @Override
    public Type getType() {
        // this pseudo-filter has no regular filter type
        return null;
    }
}
/**
 * <code>FilterContinuation</code> evaluates a rule's filter against a feature and, on a match,
 * contributes the rule's symbolizers to the accumulated list. An absent filter (or absent feature)
 * always matches; the ELSEFILTER singleton matches only when no earlier rule has contributed anything.
 *
 * @author <a href="mailto:[email protected]">Andreas Schmitz</a>
 * @author last edited by: $Author$
 *
 * @version $Revision$, $Date$
 */
public static class FilterContinuation extends Continuation<LinkedList<Symbolizer<?>>> {
    /***/
    public Filter filter;

    private LinkedList<Symbolizer<?>> syms;

    /** Contains description and so on. */
    public Common common;

    public FilterContinuation( Filter filter, LinkedList<Symbolizer<?>> syms, Common common ) {
        this.filter = filter;
        this.syms = syms;
        this.common = common;
    }

    @Override
    public void updateStep( LinkedList<Symbolizer<?>> base, Feature f, XPathEvaluator<Feature> evaluator ) {
        if ( filter == null || f == null ) {
            // no filter (or nothing to filter on): the rule always applies
            base.addAll( syms );
            return;
        }
        try {
            boolean matches = filter.evaluate( f, evaluator );
            // the else-filter only fires when no previous rule produced symbolizers
            if ( matches || ( base.isEmpty() && filter == ELSEFILTER ) ) {
                base.addAll( syms );
            }
        } catch ( FilterEvaluationException e ) {
            LOG.warn( get( "R2D.ERROR_EVAL" ), e.getLocalizedMessage(), filter.toString() );
            LOG.debug( "Stack trace:", e );
        }
    }
}
/**
 * <code>Common</code> holds the properties shared by all symbology elements — name, title, abstract
 * and geometry expression — plus the position in the source document they were parsed from.
 *
 * @author <a href="mailto:[email protected]">Andreas Schmitz</a>
 * @author last edited by: $Author$
 *
 * @version $Revision$, $Date$
 */
public static class Common {

    /***/
    public String name;

    /***/
    public String title;

    /***/
    public String abstract_;

    // optional Geometry expression of the symbolizer
    Expression geometry;

    // system id, line and column of the source document position
    String loc;

    int line, col;

    public Common() {
        // without location
    }

    Common( Location location ) {
        loc = location.getSystemId();
        line = location.getLineNumber();
        col = location.getColumnNumber();
    }
}
} | allow any whitespace in dasharrays
| deegree-core/deegree-core-rendering-2d/src/main/java/org/deegree/rendering/r2d/se/parser/SymbologyParser.java | allow any whitespace in dasharrays |
|
Java | apache-2.0 | 8fe68f8519ad46e80948f522efe5018e288fa4e0 | 0 | rodionmoiseev/c10n | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package c10n;
import java.util.Locale;
import static c10n.share.utils.Preconditions.assertNotNull;
/**
 * Static factory and decorator methods for {@link C10NFilter} providers.
 *
 * @author rodion
 */
public final class C10NFilters {
    /**
     * <p>Create an Enum-to-methods mapping filter to ease Enum translation using C10N.</p>
     *
     * <h3>Basic Usage</h3>
     * <p>Consider the following Enum type with 3 values:</p>
     * <code><pre>
     *   enum Status{
     *     Open, Closed, Pending
     *   }
     * </pre></code>
     *
     * <p>In order to localise each of the values, first create a c10n interface with a method for
     * each of the Status values</p>
     * <code><pre>
     *   @C10NMessages
     *   public interface StatusMsg {
     *     @En("open")
     *     @Ja("未着手")
     *     String open();
     *
     *     @En("closed! beer time!")
     *     @Ja("完了")
     *     String closed();
     *
     *     @En("pending ...")
     *     @Ja("進行中")
     *     String pending();
     *   }
     * </pre></code>
     *
     * Then, in your c10n configuration add an enum-filter binding for the Status type
     *
     * <code><pre>
     *   void configure(){
     *     bindFilter(C10NFilters.enumMapping(Status.class, StatusMsg.class), Status.class);
     *   }
     * </pre></code>
     *
     * <p>Now, every time c10n encounters Status as a method argument in a c10n-interface type,
     * the it will be replaced with the appropriate localised version of the Enum value:</p>
     *
     * <code><pre>
     *   @C10NMessages
     *   public interface Messages{
     *     @En("Localised status is: {0}")
     *     @Ja("状態は{0}")
     *     String showStatus(Status status);
     *   }
     * </pre></code>
     *
     * <p>Invoking <code>showStatus(Status.Closed)</code> will render
     * as <code>"Localised status is: closed! beer time!"</code>.</p>
     *
     * <h2>Method Mapping Rules</h2>
     * <p>Enum values are mapped to c10n-interface methods in the following order:
     * <ol>
     * <li>Method name matches {@code "<Enum class name>_<Enum value name>"}. e.g <code>status_open()</code></li>
     * <li>Method name matches {@code "<Enum value name>"} e.g. <code>closed()</code></li>
     * </ol>
     * </p>
     * <p><i>Note:</i> method mapping is case-insensitive.</p>
     * <p><i>Note:</i> mapped methods cannot take any arguments. Methods with arguments will be excluded from mapping.</p>
     * <p><i>Warning:</i> if mapping for one or more values is not found, a runtime exception will be thrown.</p>
     *
     * @param enumClass           Enum type to create mapping for
     * @param c10nMappedInterface a c10n-interface containing mapped methods
     * @param <E>                 Enum type
     * @return a non-cached provider of enum mapping filter
     */
    public static <E extends Enum<?>> C10NFilterProvider<E> enumMapping(Class<E> enumClass, Class<?> c10nMappedInterface) {
        return new EnumMappingFilterProvider<E>(enumClass, c10nMappedInterface);
    }

    /**
     * <p>Filter provider that always returns the specified instance</p>
     *
     * @param filter filter instance to return from the generated provider(not-null)
     * @param <T>    Filter argument type
     * @return instance of filter provider (never-null)
     */
    public static <T> C10NFilterProvider<T> staticFilterProvider(C10NFilter<T> filter) {
        assertNotNull(filter, "filter");
        return new StaticC10NFilterProvider<T>(filter);
    }

    /**
     * <p>Decorates the specified filter provider with a simple cache.
     * Only the first call will result in an execution of {@link c10n.C10NFilterProvider#get()} method.
     * The following calls will always return a cached instance of the first call.
     * The returned provider is safe for use from multiple threads.</p>
     *
     * @param filterProvider filter provider to decorate with caching (not-null)
     * @param <T>            Filter argument type
     * @return instance of a filter provider decorated with simple cache (never-null)
     */
    public static <T> C10NFilterProvider<T> cachedFilterProvider(C10NFilterProvider<T> filterProvider) {
        assertNotNull(filterProvider, "filterProvider");
        return new CachedC10NFilterProvider<T>(filterProvider);
    }

    /** Provider that always hands back the same, eagerly supplied filter instance. */
    private static final class StaticC10NFilterProvider<T> implements C10NFilterProvider<T> {
        private final C10NFilter<T> filter;

        private StaticC10NFilterProvider(C10NFilter<T> filter) {
            this.filter = filter;
        }

        @Override
        public C10NFilter<T> get() {
            return filter;
        }
    }

    /**
     * Memoizing decorator: delegates to the wrapped provider exactly once and caches the result.
     * Uses double-checked locking with a volatile field so that concurrent first calls cannot
     * observe a partially published filter or invoke the delegate more than once.
     */
    private static final class CachedC10NFilterProvider<T> implements C10NFilterProvider<T> {
        private final C10NFilterProvider<T> base;
        private volatile C10NFilter<T> thunk = null;

        private CachedC10NFilterProvider(C10NFilterProvider<T> base) {
            this.base = base;
        }

        @Override
        public C10NFilter<T> get() {
            C10NFilter<T> result = thunk;
            if (null == result) {
                synchronized (this) {
                    result = thunk;
                    if (null == result) {
                        result = base.get();
                        thunk = result;
                    }
                }
            }
            return result;
        }
    }

    /**
     * Provider backing {@link #enumMapping(Class, Class)}: builds a fresh (non-cached)
     * {@link EnumMappingFilter} on every call.
     *
     * @author rodion
     */
    private static final class EnumMappingFilterProvider<E extends Enum<?>> implements C10NFilterProvider<E> {
        private final Class<E> enumClass;
        private final Class<?> c10nMappedInterface;

        EnumMappingFilterProvider(Class<E> enumClass, Class<?> c10nMappedInterface) {
            this.enumClass = enumClass;
            this.c10nMappedInterface = c10nMappedInterface;
        }

        @Override
        public C10NFilter<E> get() {
            return new EnumMappingFilter<E>(enumClass, c10nMappedInterface);
        }
    }
}
| core/src/main/java/c10n/C10NFilters.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package c10n;
import java.util.Locale;
import static c10n.share.utils.Preconditions.assertNotNull;
/**
* @author rodion
*/
public final class C10NFilters {
/**
* <p>Create an Enum-to-methods mapping filter to ease Enum translation using C10N.</p>
*
* <h3>Basic Usage</h3>
* <p>Consider the following Enum type with 3 values:</p>
* <code><pre>
* enum Status{
* Open, Closed, Pending
* }
* </pre></code>
*
* <p>In order to localise each of the values, first create a c10n interface with a method for
* each of the Status values</p>
* <code><pre>
* @C10NMessages
* public interface StatusMsg {
* @En("open")
* @Ja("未着手")
* String open();
*
* @En("closed! beer time!")
* @Ja("完了")
* String closed();
*
* @En("pending ...")
* @Ja("進行中")
* String pending();
* }
* </pre></code>
*
* Then, in your c10n configuration add an enum-filter binding for the Status type
*
* <code><pre>
* void configure(){
* bindFilter(C10NFilters.enumMapping(Status.class, StatusMsg.class), Status.class);
* }
* </pre></code>
*
* <p>Now, everytime c10n encounteres Status as a method argument in a c10n-interface type,
* the it will be replaced with the appropriate localised version of the Enum value:</p>
*
* <code><pre>
* @C10NMessages
* public interface Messages{
* @En("Localised status is: {0}")
* @Ja("状態は{0}")
* String showStatus(Status status);
* }
* </pre></code>
*
* <p>Invoking <code>showStatus(Status.Closed)</code> will render
* as <code>"Localised status is: closed! beer time!"</code>.</p>
*
* <h2>Method Mapping Rules</h2>
* <p>Enum values are mapped to c10n-interface methods in the following order:
* <ol>
* <li>Method name matches "<Enum class name>_<Enum value name>". e.g <code>status_open()</code></li>
* <li>Method name matches "<Enum value name>" e.g. <code>closed()</code></li>
* </ol>
* </p>
* <p><i>Note:</i> method mapping is case-insensitive.</p>
* <p><i>Note:</i> mapped methods cannot take any arguments. Methods with arguments will be excluded from mapping.</p>
* <p><i>Warning:</i> if mapping for one or more values is not found, a runtime exception will be thrown.</p>
*
* @param enumClass Enum type to create mapping for
* @param c10nMappedInterface a c10n-interface containing mapped methods
* @param <E> Enum type
* @return a non-cached provider of enum mapping filter
*/
public static <E extends Enum<?>> C10NFilterProvider<E> enumMapping(Class<E> enumClass, Class<?> c10nMappedInterface) {
return new EnumMappingFilterProvider<E>(enumClass, c10nMappedInterface);
}
/**
* <p>Filter provider that always returns the specified instance</p>
* @param filter filter instance to return from the generated provider(not-null)
* @param <T> Filter argument type
* @return instance of filter provider (never-null)
*/
public static <T> C10NFilterProvider<T> staticFilterProvider(C10NFilter<T> filter){
assertNotNull(filter, "filter");
return new StaticC10NFilterProvider<T>(filter);
}
/**
* <p>Decorates the specified filter provider with a simple static cache.
* Only the first call will result in an execution of {@link c10n.C10NFilterProvider#get()} method.
* The following calls will always return a cached instance of the first call.</p>
* @param filterProvider filter provider to decorate with caching (not-null)
* @param <T> Filter argument type
* @return instance of a filter provider decorated with simple static cache (never-null)
*/
public static <T> C10NFilterProvider<T> cachedFilterProvider(C10NFilterProvider<T> filterProvider){
assertNotNull(filterProvider, "filterProvider");
return new CachedC10NFilterProvider<T>(filterProvider);
}
private static final class StaticC10NFilterProvider<T> implements C10NFilterProvider<T>{
private final C10NFilter<T> filter;
private StaticC10NFilterProvider(C10NFilter<T> filter) {
this.filter = filter;
}
@Override
public C10NFilter<T> get() {
return filter;
}
}
private static final class CachedC10NFilterProvider<T> implements C10NFilterProvider<T>{
private final C10NFilterProvider<T> base;
private C10NFilter<T> thunk = null;
private CachedC10NFilterProvider(C10NFilterProvider<T> base) {
this.base = base;
}
@Override
public C10NFilter<T> get() {
if(null==thunk){
thunk = base.get();
}
return thunk;
}
}
/**
* @author rodion
*/
private static final class EnumMappingFilterProvider<E extends Enum<?>> implements C10NFilterProvider<E> {
private final Class<E> enumClass;
private final Class<?> c10nMappedInterface;
EnumMappingFilterProvider(Class<E> enumClass, Class<?> c10nMappedInterface) {
this.enumClass = enumClass;
this.c10nMappedInterface = c10nMappedInterface;
}
@Override
public C10NFilter<E> get() {
return new EnumMappingFilter<E>(enumClass, c10nMappedInterface);
}
}
}
| Typo fix
| core/src/main/java/c10n/C10NFilters.java | Typo fix |
|
Java | apache-2.0 | f1371fb8fc57c7cc4240d8739182bf0ee3c31682 | 0 | rakeshmane/FOCUS |
package focus;
import javafx.application.Application;
import javafx.collections.ObservableList;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ProgressIndicator;
import javafx.scene.control.TextArea;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
/**
*
* @author rakesh
*/
public class Main_Window extends Application {
@Override
public void start(Stage stage) {
TextArea keywords = new TextArea();
Button crawl = new Button("Search");
TextArea progress_text=new TextArea();
ProgressIndicator progress=new ProgressIndicator();
progress_text.setPrefColumnCount(50); //Width
progress_text.setPrefRowCount(50); //Height
keywords.setPrefColumnCount(8); //Width
keywords.setPrefRowCount(5); //Height
HBox horizontal_layout = new HBox();
VBox vertical_layout = new VBox();
vertical_layout.setAlignment(Pos.CENTER); // To align nodes (Button,SearchBox,ProgressIndicator) in center
vertical_layout.setSpacing(30);
horizontal_layout.setMargin(vertical_layout, new Insets(20, 20, 20, 20));
horizontal_layout.setMargin(progress_text, new Insets(20, 20, 20, 20));
ObservableList list_v=vertical_layout.getChildren();
list_v.addAll(keywords,crawl,progress);
ObservableList list_h = horizontal_layout.getChildren();
list_h.addAll(vertical_layout,progress_text);
Scene scene = new Scene(horizontal_layout);
//Setting title to the Stage
stage.setTitle("Main Window");
//Adding scene to the satge
stage.setScene(scene);
//Displaying the contents of the stage
stage.show();
}
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
launch();
}
}
| GUI/Main_Window.java |
package focus;
import javafx.application.Application;
import javafx.collections.ObservableList;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ProgressIndicator;
import javafx.scene.control.TextArea;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
/**
*
* @author rakesh
*/
public class Main_Window extends Application {
@Override
public void start(Stage stage) {
TextArea keywords = new TextArea();
Button crawl = new Button("Search");
TextArea progress_text=new TextArea();
ProgressIndicator progress=new ProgressIndicator();
//Instantiating the HBox class
progress_text.setPrefColumnCount(50); //Width
progress_text.setPrefRowCount(50); //Height
keywords.setPrefColumnCount(8); //Width
keywords.setPrefRowCount(5); //Height
HBox horizontal_layout = new HBox();
VBox vertical_layout = new VBox();
vertical_layout.setAlignment(Pos.CENTER); // To align nodes (Button,SearchBox,ProgressIndicator) in center
vertical_layout.setSpacing(30);
horizontal_layout.setMargin(vertical_layout, new Insets(20, 20, 20, 20));
horizontal_layout.setMargin(progress_text, new Insets(20, 20, 20, 20));
ObservableList list_v=vertical_layout.getChildren();
list_v.addAll(keywords,crawl,progress);
ObservableList list_h = horizontal_layout.getChildren();
list_h.addAll(vertical_layout,progress_text);
Scene scene = new Scene(horizontal_layout);
//Setting title to the Stage
stage.setTitle("Main Window");
//Adding scene to the satge
stage.setScene(scene);
//Displaying the contents of the stage
stage.show();
}
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
launch();
}
}
| Update Main_Window.java | GUI/Main_Window.java | Update Main_Window.java |
|
Java | apache-2.0 | 3ef00cf22d42eb9a5d9923df69b1e092a5bb3e63 | 0 | henrichg/PhoneProfilesPlus | package sk.henrichg.phoneprofilesplus;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.content.pm.ShortcutInfoCompat;
import android.support.v4.content.pm.ShortcutManagerCompat;
import android.support.v4.graphics.drawable.IconCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.Comparator;
public class ShortcutCreatorListFragment extends Fragment {
private DataWrapper activityDataWrapper;
private ShortcutCreatorListAdapter profileListAdapter;
private ListView listView;
TextView textViewNoData;
private LinearLayout progressBar;
private WeakReference<LoadProfileListAsyncTask> asyncTaskContext;
public ShortcutCreatorListFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// this is really important in order to save the state across screen
// configuration changes for example
setRetainInstance(true);
activityDataWrapper = new DataWrapper(getActivity().getApplicationContext(), false, 0);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView;
rootView = inflater.inflate(R.layout.shortcut_creator_list, container, false);
return rootView;
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
doOnViewCreated(view);
}
private void doOnViewCreated(View view/*, Bundle savedInstanceState*/)
{
listView = view.findViewById(R.id.shortcut_profiles_list);
textViewNoData = view.findViewById(R.id.shortcut_profiles_list_empty);
progressBar = view.findViewById(R.id.shortcut_profiles_list_linla_progress);
listView.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
createShortcut(position);
}
});
if (!activityDataWrapper.profileListFilled)
{
LoadProfileListAsyncTask asyncTask = new LoadProfileListAsyncTask(this);
this.asyncTaskContext = new WeakReference<>(asyncTask );
asyncTask.execute();
}
else
{
listView.setAdapter(profileListAdapter);
}
}
private static class LoadProfileListAsyncTask extends AsyncTask<Void, Void, Void> {
private final WeakReference<ShortcutCreatorListFragment> fragmentWeakRef;
private final DataWrapper dataWrapper;
private class ProfileComparator implements Comparator<Profile> {
public int compare(Profile lhs, Profile rhs) {
if (GlobalGUIRoutines.collator != null)
return GlobalGUIRoutines.collator.compare(lhs._name, rhs._name);
else
return 0;
}
}
private LoadProfileListAsyncTask (ShortcutCreatorListFragment fragment) {
this.fragmentWeakRef = new WeakReference<>(fragment);
this.dataWrapper = new DataWrapper(fragment.getActivity().getApplicationContext(), false, 0);
}
@Override
protected void onPreExecute()
{
super.onPreExecute();
ShortcutCreatorListFragment fragment = this.fragmentWeakRef.get();
if ((fragment != null) && (fragment.isAdded())) {
fragment.textViewNoData.setVisibility(View.GONE);
fragment.progressBar.setVisibility(View.VISIBLE);
}
}
@Override
protected Void doInBackground(Void... params) {
this.dataWrapper.fillProfileList(true, ApplicationPreferences.applicationActivatorPrefIndicator(this.dataWrapper.context));
Collections.sort(this.dataWrapper.profileList, new ProfileComparator());
// add restart events
Profile profile = DataWrapper.getNonInitializedProfile(this.dataWrapper.context.getString(R.string.menu_restart_events), "ic_action_events_restart_color|1|0|0", 0);
this.dataWrapper.profileList.add(0, profile);
return null;
}
@Override
protected void onPostExecute(Void response) {
super.onPostExecute(response);
ShortcutCreatorListFragment fragment = this.fragmentWeakRef.get();
if ((fragment != null) && (fragment.isAdded())) {
fragment.progressBar.setVisibility(View.GONE);
// get local profileList
this.dataWrapper.fillProfileList(true, ApplicationPreferences.applicationActivatorPrefIndicator(this.dataWrapper.context));
// set copy local profile list into activity profilesDataWrapper
fragment.activityDataWrapper.copyProfileList(this.dataWrapper);
fragment.profileListAdapter = new ShortcutCreatorListAdapter(fragment, fragment.activityDataWrapper);
fragment.listView.setAdapter(fragment.profileListAdapter);
}
}
}
private boolean isAsyncTaskPendingOrRunning() {
return this.asyncTaskContext != null &&
this.asyncTaskContext.get() != null &&
!this.asyncTaskContext.get().getStatus().equals(AsyncTask.Status.FINISHED);
}
@Override
public void onDestroy()
{
if (isAsyncTaskPendingOrRunning()) {
this.asyncTaskContext.get().cancel(true);
}
if (listView != null)
listView.setAdapter(null);
if (profileListAdapter != null)
profileListAdapter.release();
if (activityDataWrapper != null)
activityDataWrapper.invalidateDataWrapper();
activityDataWrapper = null;
super.onDestroy();
}
@SuppressLint("StaticFieldLeak")
private void createShortcut(final int position)
{
new AsyncTask<Void, Integer, Void>() {
Profile profile;
boolean isIconResourceID;
String iconIdentifier;
Bitmap profileBitmap;
Bitmap shortcutOverlayBitmap;
Bitmap profileShortcutBitmap;
String profileName;
String longLabel;
boolean useCustomColor;
Context context;
Intent shortcutIntent;
ShortcutInfoCompat.Builder shortcutBuilder;
@Override
protected void onPreExecute()
{
super.onPreExecute();
profile = activityDataWrapper.profileList.get(position);
context = getActivity().getApplicationContext();
if (profile != null) {
isIconResourceID = profile.getIsIconResourceID();
iconIdentifier = profile.getIconIdentifier();
profileName = profile._name;
longLabel = getString(R.string.shortcut_activate_profile) + profileName;
useCustomColor = profile.getUseCustomColorForIcon();
if (position == 0) {
profileName = getString(R.string.menu_restart_events);
longLabel = profileName;
}
} else {
isIconResourceID = true;
iconIdentifier = Profile.PROFILE_ICON_DEFAULT;
profileName = getString(R.string.profile_name_default);
longLabel = getString(R.string.shortcut_activate_profile) + profileName;
useCustomColor = false;
}
if (profileName.isEmpty())
profileName = " ";
if (position == 0) {
// restart events
shortcutIntent = new Intent(context, ActionForExternalApplicationActivity.class);
shortcutIntent.setAction(ActionForExternalApplicationActivity.ACTION_RESTART_EVENTS);
} else {
shortcutIntent = new Intent(context, BackgroundActivateProfileActivity.class);
shortcutIntent.setAction(Intent.ACTION_MAIN);
shortcutIntent.putExtra(PPApplication.EXTRA_STARTUP_SOURCE, PPApplication.STARTUP_SOURCE_SHORTCUT);
//noinspection ConstantConditions
shortcutIntent.putExtra(PPApplication.EXTRA_PROFILE_ID, profile._id);
}
/*
Intent intent = new Intent();
intent.putExtra(Intent.EXTRA_SHORTCUT_INTENT, shortcutIntent);
intent.putExtra(Intent.EXTRA_SHORTCUT_NAME, profileName);
*/
shortcutBuilder = new ShortcutInfoCompat.Builder(context, "profile_shortcut");
shortcutBuilder.setIntent(shortcutIntent);
shortcutBuilder.setShortLabel(profileName);
shortcutBuilder.setLongLabel(longLabel);
}
@Override
protected Void doInBackground(Void... params) {
if (isIconResourceID) {
//noinspection ConstantConditions
if (profile._iconBitmap != null)
profileBitmap = profile._iconBitmap;
else {
//int iconResource = getResources().getIdentifier(iconIdentifier, "drawable", context.getPackageName());
int iconResource = Profile.profileIconIdMap.get(iconIdentifier);
profileBitmap = BitmapFactory.decodeResource(getResources(), iconResource);
}
if (Build.VERSION.SDK_INT < 26)
shortcutOverlayBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_shortcut_overlay);
} else {
Resources resources = getResources();
int height = (int) resources.getDimension(android.R.dimen.app_icon_size);
int width = (int) resources.getDimension(android.R.dimen.app_icon_size);
//Log.d("---- ShortcutCreatorListFragment.generateIconBitmap","resampleBitmapUri");
profileBitmap = BitmapManipulator.resampleBitmapUri(iconIdentifier, width, height, true, context);
if (profileBitmap == null) {
int iconResource = R.drawable.ic_profile_default;
profileBitmap = BitmapFactory.decodeResource(getResources(), iconResource);
}
if (Build.VERSION.SDK_INT < 26)
shortcutOverlayBitmap = BitmapManipulator.resampleResource(resources, R.drawable.ic_shortcut_overlay, width, height);
}
if (ApplicationPreferences.applicationWidgetIconColor(activityDataWrapper.context).equals("1")) {
int monochromeValue = 0xFF;
String applicationWidgetIconLightness = ApplicationPreferences.applicationWidgetIconLightness(activityDataWrapper.context);
if (applicationWidgetIconLightness.equals("0")) monochromeValue = 0x00;
if (applicationWidgetIconLightness.equals("25")) monochromeValue = 0x40;
if (applicationWidgetIconLightness.equals("50")) monochromeValue = 0x80;
if (applicationWidgetIconLightness.equals("75")) monochromeValue = 0xC0;
if (applicationWidgetIconLightness.equals("100")) monochromeValue = 0xFF;
if (isIconResourceID || useCustomColor) {
// icon is from resource or colored by custom color
profileBitmap = BitmapManipulator.monochromeBitmap(profileBitmap, monochromeValue/*, context*/);
} else
profileBitmap = BitmapManipulator.grayScaleBitmap(profileBitmap);
}
if (Build.VERSION.SDK_INT < 26)
profileShortcutBitmap = combineImages(profileBitmap, shortcutOverlayBitmap);
else
profileShortcutBitmap = profileBitmap;
//intent.putExtra(Intent.EXTRA_SHORTCUT_ICON, profileShortcutBitmap);
shortcutBuilder.setIcon(IconCompat.createWithBitmap(profileShortcutBitmap));
return null;
}
@Override
protected void onPostExecute(Void result)
{
super.onPostExecute(result);
//intent.setAction("com.android.launcher.action.INSTALL_SHORTCUT");
//context.sendBroadcast(intent);
ShortcutInfoCompat shortcutInfo = shortcutBuilder.build();
Intent intent = ShortcutManagerCompat.createShortcutResultIntent(context, shortcutInfo);
getActivity().setResult(Activity.RESULT_OK, intent);
getActivity().finish();
}
}.execute();
}
private Bitmap combineImages(Bitmap bitmap1, Bitmap bitmap2)
{
Bitmap combined;
int width;
int height;
width = bitmap2.getWidth();
height = bitmap2.getHeight();
combined = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(combined);
canvas.drawBitmap(bitmap1, 0f, 0f, null);
if (ApplicationPreferences.applicationShortcutEmblem(activityDataWrapper.context))
canvas.drawBitmap(bitmap2, 0f, 0f, null);
return combined;
}
}
| phoneProfilesPlus/src/main/java/sk/henrichg/phoneprofilesplus/ShortcutCreatorListFragment.java | package sk.henrichg.phoneprofilesplus;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.content.pm.ShortcutInfoCompat;
import android.support.v4.content.pm.ShortcutManagerCompat;
import android.support.v4.graphics.drawable.IconCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.Comparator;
public class ShortcutCreatorListFragment extends Fragment {
private DataWrapper activityDataWrapper;
private ShortcutCreatorListAdapter profileListAdapter;
private ListView listView;
TextView textViewNoData;
private LinearLayout progressBar;
private WeakReference<LoadProfileListAsyncTask> asyncTaskContext;
public ShortcutCreatorListFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// this is really important in order to save the state across screen
// configuration changes for example
setRetainInstance(true);
activityDataWrapper = new DataWrapper(getActivity().getApplicationContext(), false, 0);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View rootView;
rootView = inflater.inflate(R.layout.shortcut_creator_list, container, false);
return rootView;
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
doOnViewCreated(view);
}
private void doOnViewCreated(View view/*, Bundle savedInstanceState*/)
{
listView = view.findViewById(R.id.shortcut_profiles_list);
textViewNoData = view.findViewById(R.id.shortcut_profiles_list_empty);
progressBar = view.findViewById(R.id.shortcut_profiles_list_linla_progress);
listView.setOnItemClickListener(new OnItemClickListener() {
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
createShortcut(position);
}
});
if (!activityDataWrapper.profileListFilled)
{
LoadProfileListAsyncTask asyncTask = new LoadProfileListAsyncTask(this);
this.asyncTaskContext = new WeakReference<>(asyncTask );
asyncTask.execute();
}
else
{
listView.setAdapter(profileListAdapter);
}
}
private static class LoadProfileListAsyncTask extends AsyncTask<Void, Void, Void> {
private final WeakReference<ShortcutCreatorListFragment> fragmentWeakRef;
private final DataWrapper dataWrapper;
private class ProfileComparator implements Comparator<Profile> {
public int compare(Profile lhs, Profile rhs) {
if (GlobalGUIRoutines.collator != null)
return GlobalGUIRoutines.collator.compare(lhs._name, rhs._name);
else
return 0;
}
}
private LoadProfileListAsyncTask (ShortcutCreatorListFragment fragment) {
this.fragmentWeakRef = new WeakReference<>(fragment);
this.dataWrapper = new DataWrapper(fragment.getActivity().getApplicationContext(), false, 0);
}
@Override
protected void onPreExecute()
{
super.onPreExecute();
ShortcutCreatorListFragment fragment = this.fragmentWeakRef.get();
if ((fragment != null) && (fragment.isAdded())) {
fragment.textViewNoData.setVisibility(View.GONE);
fragment.progressBar.setVisibility(View.VISIBLE);
}
}
@Override
protected Void doInBackground(Void... params) {
this.dataWrapper.fillProfileList(true, ApplicationPreferences.applicationActivatorPrefIndicator(this.dataWrapper.context));
Collections.sort(this.dataWrapper.profileList, new ProfileComparator());
// add restart events
Profile profile = DataWrapper.getNonInitializedProfile(this.dataWrapper.context.getString(R.string.menu_restart_events), "ic_action_events_restart_color|1|0|0", 0);
this.dataWrapper.profileList.add(0, profile);
return null;
}
@Override
protected void onPostExecute(Void response) {
super.onPostExecute(response);
ShortcutCreatorListFragment fragment = this.fragmentWeakRef.get();
if ((fragment != null) && (fragment.isAdded())) {
fragment.progressBar.setVisibility(View.GONE);
// get local profileList
this.dataWrapper.fillProfileList(true, ApplicationPreferences.applicationActivatorPrefIndicator(this.dataWrapper.context));
// set copy local profile list into activity profilesDataWrapper
fragment.activityDataWrapper.copyProfileList(this.dataWrapper);
fragment.profileListAdapter = new ShortcutCreatorListAdapter(fragment, fragment.activityDataWrapper);
fragment.listView.setAdapter(fragment.profileListAdapter);
}
}
}
private boolean isAsyncTaskPendingOrRunning() {
return this.asyncTaskContext != null &&
this.asyncTaskContext.get() != null &&
!this.asyncTaskContext.get().getStatus().equals(AsyncTask.Status.FINISHED);
}
@Override
public void onDestroy()
{
if (isAsyncTaskPendingOrRunning()) {
this.asyncTaskContext.get().cancel(true);
}
if (listView != null)
listView.setAdapter(null);
if (profileListAdapter != null)
profileListAdapter.release();
if (activityDataWrapper != null)
activityDataWrapper.invalidateDataWrapper();
activityDataWrapper = null;
super.onDestroy();
}
@SuppressLint("StaticFieldLeak")
private void createShortcut(final int position)
{
new AsyncTask<Void, Integer, Void>() {
Profile profile;
boolean isIconResourceID;
String iconIdentifier;
Bitmap profileBitmap;
Bitmap shortcutOverlayBitmap;
Bitmap profileShortcutBitmap;
String profileName;
String longLabel;
boolean useCustomColor;
Context context;
Intent shortcutIntent;
ShortcutInfoCompat.Builder shortcutBuilder;
@Override
protected void onPreExecute()
{
super.onPreExecute();
profile = activityDataWrapper.profileList.get(position);
context = getActivity().getApplicationContext();
if (profile != null) {
isIconResourceID = profile.getIsIconResourceID();
iconIdentifier = profile.getIconIdentifier();
profileName = profile._name;
longLabel = getString(R.string.shortcut_activate_profile) + profileName;
useCustomColor = profile.getUseCustomColorForIcon();
if (position == 0) {
profileName = getString(R.string.menu_restart_events);
longLabel = profileName;
}
} else {
isIconResourceID = true;
iconIdentifier = Profile.PROFILE_ICON_DEFAULT;
profileName = getString(R.string.profile_name_default);
longLabel = getString(R.string.shortcut_activate_profile) + profileName;
useCustomColor = false;
}
if (profileName.isEmpty())
profileName = " ";
if (position == 0) {
// restart events
shortcutIntent = new Intent(context, ActionForExternalApplicationActivity.class);
shortcutIntent.setAction(ActionForExternalApplicationActivity.ACTION_RESTART_EVENTS);
} else {
shortcutIntent = new Intent(context, BackgroundActivateProfileActivity.class);
shortcutIntent.setAction(Intent.ACTION_MAIN);
shortcutIntent.putExtra(PPApplication.EXTRA_STARTUP_SOURCE, PPApplication.STARTUP_SOURCE_SHORTCUT);
//noinspection ConstantConditions
shortcutIntent.putExtra(PPApplication.EXTRA_PROFILE_ID, profile._id);
}
/*
Intent intent = new Intent();
intent.putExtra(Intent.EXTRA_SHORTCUT_INTENT, shortcutIntent);
intent.putExtra(Intent.EXTRA_SHORTCUT_NAME, profileName);
*/
shortcutBuilder = new ShortcutInfoCompat.Builder(context, "profile_shortcut");
shortcutBuilder.setIntent(shortcutIntent);
shortcutBuilder.setShortLabel(profileName);
shortcutBuilder.setLongLabel(longLabel);
}
@Override
protected Void doInBackground(Void... params) {
if (isIconResourceID) {
//noinspection ConstantConditions
if (profile._iconBitmap != null)
profileBitmap = profile._iconBitmap;
else {
//int iconResource = getResources().getIdentifier(iconIdentifier, "drawable", context.getPackageName());
int iconResource = Profile.profileIconIdMap.get(iconIdentifier);
profileBitmap = BitmapFactory.decodeResource(getResources(), iconResource);
}
shortcutOverlayBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_shortcut_overlay);
} else {
Resources resources = getResources();
int height = (int) resources.getDimension(android.R.dimen.app_icon_size);
int width = (int) resources.getDimension(android.R.dimen.app_icon_size);
//Log.d("---- ShortcutCreatorListFragment.generateIconBitmap","resampleBitmapUri");
profileBitmap = BitmapManipulator.resampleBitmapUri(iconIdentifier, width, height, true, context);
if (profileBitmap == null) {
int iconResource = R.drawable.ic_profile_default;
profileBitmap = BitmapFactory.decodeResource(getResources(), iconResource);
}
shortcutOverlayBitmap = BitmapManipulator.resampleResource(resources, R.drawable.ic_shortcut_overlay, width, height);
}
if (ApplicationPreferences.applicationWidgetIconColor(activityDataWrapper.context).equals("1")) {
int monochromeValue = 0xFF;
String applicationWidgetIconLightness = ApplicationPreferences.applicationWidgetIconLightness(activityDataWrapper.context);
if (applicationWidgetIconLightness.equals("0")) monochromeValue = 0x00;
if (applicationWidgetIconLightness.equals("25")) monochromeValue = 0x40;
if (applicationWidgetIconLightness.equals("50")) monochromeValue = 0x80;
if (applicationWidgetIconLightness.equals("75")) monochromeValue = 0xC0;
if (applicationWidgetIconLightness.equals("100")) monochromeValue = 0xFF;
if (isIconResourceID || useCustomColor) {
// icon is from resource or colored by custom color
profileBitmap = BitmapManipulator.monochromeBitmap(profileBitmap, monochromeValue/*, context*/);
} else
profileBitmap = BitmapManipulator.grayScaleBitmap(profileBitmap);
}
profileShortcutBitmap = combineImages(profileBitmap, shortcutOverlayBitmap);
//intent.putExtra(Intent.EXTRA_SHORTCUT_ICON, profileShortcutBitmap);
shortcutBuilder.setIcon(IconCompat.createWithBitmap(profileShortcutBitmap));
return null;
}
@Override
protected void onPostExecute(Void result)
{
super.onPostExecute(result);
//intent.setAction("com.android.launcher.action.INSTALL_SHORTCUT");
//context.sendBroadcast(intent);
ShortcutInfoCompat shortcutInfo = shortcutBuilder.build();
Intent intent = ShortcutManagerCompat.createShortcutResultIntent(context, shortcutInfo);
getActivity().setResult(Activity.RESULT_OK, intent);
getActivity().finish();
}
}.execute();
}
/**
 * Draws the profile icon and, when the "shortcut emblem" preference is
 * enabled, the overlay bitmap on top of it. The result is an ARGB_8888
 * bitmap sized to the overlay bitmap's dimensions.
 */
private Bitmap combineImages(Bitmap bitmap1, Bitmap bitmap2)
{
    final int overlayWidth = bitmap2.getWidth();
    final int overlayHeight = bitmap2.getHeight();
    final Bitmap result = Bitmap.createBitmap(overlayWidth, overlayHeight, Bitmap.Config.ARGB_8888);
    final Canvas canvas = new Canvas(result);
    canvas.drawBitmap(bitmap1, 0f, 0f, null);
    if (ApplicationPreferences.applicationShortcutEmblem(activityDataWrapper.context))
        canvas.drawBitmap(bitmap2, 0f, 0f, null);
    return result;
}
}
| Do not add overlay bitmap into shortcut icon for Android >= 26.
| phoneProfilesPlus/src/main/java/sk/henrichg/phoneprofilesplus/ShortcutCreatorListFragment.java | Do not add overlay bitmap into shortcut icon for Android >= 26. |
|
Java | apache-2.0 | 8b4d0933818d236ddb176c2c16809d462f7738fa | 0 | githubcolin/Metamorphosis,killme2008/Metamorphosis,jarvisxiong/Metamorphosis,killme2008/Metamorphosis,fool-persen/Metamorphosis,xiaojiaqi/Metamorphosis,fogu/Metamorphosis,binarygu/Metamorphosis,fogu/Metamorphosis,fengshao0907/Metamorphosis,binarygu/Metamorphosis,xiaojiaqi/Metamorphosis,IBYoung/Metamorphosis,binarygu/Metamorphosis,fool-persen/Metamorphosis,ronaldo9grey/Metamorphosis,IBYoung/Metamorphosis,yuzhu712/Metamorphosis,IBYoung/Metamorphosis,makemyownlife/Metamorphosis,fengshao0907/Metamorphosis,272029252/Metamorphosis,ronaldo9grey/Metamorphosis,githubcolin/Metamorphosis,githubcolin/Metamorphosis,makemyownlife/Metamorphosis,jarvisxiong/Metamorphosis,binarygu/Metamorphosis,yuzhu712/Metamorphosis,killme2008/Metamorphosis,fengshao0907/Metamorphosis,fool-persen/Metamorphosis,makemyownlife/Metamorphosis,jarvisxiong/Metamorphosis,fool-persen/Metamorphosis,xiaojiaqi/Metamorphosis,272029252/Metamorphosis,binarygu/Metamorphosis,fool-persen/Metamorphosis,ronaldo9grey/Metamorphosis,fogu/Metamorphosis,githubcolin/Metamorphosis,fogu/Metamorphosis,githubcolin/Metamorphosis,fool-persen/Metamorphosis,fengshao0907/Metamorphosis,yuzhu712/Metamorphosis,IBYoung/Metamorphosis,makemyownlife/Metamorphosis,githubcolin/Metamorphosis,272029252/Metamorphosis,killme2008/Metamorphosis,ronaldo9grey/Metamorphosis,jarvisxiong/Metamorphosis,binarygu/Metamorphosis,IBYoung/Metamorphosis,272029252/Metamorphosis,xiaojiaqi/Metamorphosis,xiaojiaqi/Metamorphosis,xiaojiaqi/Metamorphosis,272029252/Metamorphosis,killme2008/Metamorphosis,272029252/Metamorphosis,IBYoung/Metamorphosis,fengshao0907/Metamorphosis,yuzhu712/Metamorphosis,jarvisxiong/Metamorphosis,makemyownlife/Metamorphosis,killme2008/Metamorphosis,ronaldo9grey/Metamorphosis,jarvisxiong/Metamorphosis,fogu/Metamorphosis,yuzhu712/Metamorphosis,fogu/Metamorphosis,ronaldo9grey/Metamorphosis,makemyownlife/Metamorphosis,yuzhu712/Metamorphosis,fengshao0907/Metamorphosis | /*
* (C) 2007-2012 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Authors:
* wuhua <[email protected]> , boyan <[email protected]>
*/
package com.taobao.metamorphosis.client.consumer;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.FutureTask;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.I0Itec.zkclient.IZkChildListener;
import org.I0Itec.zkclient.IZkStateListener;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkNodeExistsException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.zookeeper.KeeperException.NoNodeException;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import com.taobao.gecko.service.exception.NotifyRemotingException;
import com.taobao.metamorphosis.client.RemotingClientWrapper;
import com.taobao.metamorphosis.client.ZkClientChangedListener;
import com.taobao.metamorphosis.client.consumer.storage.OffsetStorage;
import com.taobao.metamorphosis.cluster.Broker;
import com.taobao.metamorphosis.cluster.Cluster;
import com.taobao.metamorphosis.cluster.Partition;
import com.taobao.metamorphosis.exception.MetaClientException;
import com.taobao.metamorphosis.network.RemotingUtils;
import com.taobao.metamorphosis.utils.MetaZookeeper;
import com.taobao.metamorphosis.utils.MetaZookeeper.ZKGroupDirs;
import com.taobao.metamorphosis.utils.MetaZookeeper.ZKGroupTopicDirs;
import com.taobao.metamorphosis.utils.ZkUtils;
import com.taobao.metamorphosis.utils.ZkUtils.ZKConfig;
/**
* ConsumerZookeeper
*
* @author boyan
* @Date 2011-4-26
* @author wuhua
* @Date 2011-6-26
*/
public class ConsumerZooKeeper implements ZkClientChangedListener {
protected ZkClient zkClient;
protected final ConcurrentHashMap<FetchManager, FutureTask<ZKLoadRebalanceListener>> consumerLoadBalanceListeners =
new ConcurrentHashMap<FetchManager, FutureTask<ZKLoadRebalanceListener>>();
private final RemotingClientWrapper remotingClient;
private final ZKConfig zkConfig;
protected final MetaZookeeper metaZookeeper;
/**
 * Creates a consumer-side ZooKeeper helper.
 *
 * @param metaZookeeper  helper exposing the broker/consumer paths in ZooKeeper
 * @param remotingClient remoting client used to connect to brokers
 * @param zkClient       ZooKeeper client; may be null (direct-connection mode)
 * @param zkConfig       ZooKeeper configuration (sync timeouts etc.)
 */
public ConsumerZooKeeper(final MetaZookeeper metaZookeeper, final RemotingClientWrapper remotingClient,
        final ZkClient zkClient, final ZKConfig zkConfig) {
    super();
    this.metaZookeeper = metaZookeeper;
    this.zkClient = zkClient;
    this.remotingClient = remotingClient;
    this.zkConfig = zkConfig;
}
/**
 * Commits the consumed offsets for the given fetch manager, if the consumer
 * has been registered and its rebalance listener is available.
 */
public void commitOffsets(final FetchManager fetchManager) {
    final ZKLoadRebalanceListener balanceListener = this.getBrokerConnectionListener(fetchManager);
    if (balanceListener == null) {
        return;
    }
    balanceListener.commitOffsets();
}
/**
 * Returns the rebalance listener associated with the given fetch manager,
 * or null when the consumer has not been registered or the listener could
 * not be retrieved.
 */
public ZKLoadRebalanceListener getBrokerConnectionListener(final FetchManager fetchManager) {
    final FutureTask<ZKLoadRebalanceListener> task = this.consumerLoadBalanceListeners.get(fetchManager);
    if (task == null) {
        return null;
    }
    try {
        return task.get();
    }
    catch (final InterruptedException e) {
        // Restore the interrupt status instead of swallowing it.
        Thread.currentThread().interrupt();
        log.error("Getting ZKLoadRebalanceListener was interrupted", e);
        return null;
    }
    catch (final Exception e) {
        // Original message was garbled (mojibake); reworded in English.
        log.error("Getting ZKLoadRebalanceListener failed", e);
        return null;
    }
}
/**
 * Unregisters the consumer behind the given fetch manager: commits its
 * offsets, unsubscribes all ZooKeeper listeners, releases partition
 * ownership and removes the consumer's ephemeral registry node.
 *
 * @param fetchManager the fetch manager that was passed to registerConsumer
 */
public void unRegisterConsumer(final FetchManager fetchManager) {
    try {
        final FutureTask<ZKLoadRebalanceListener> futureTask =
                this.consumerLoadBalanceListeners.remove(fetchManager);
        if (futureTask != null) {
            final ZKLoadRebalanceListener listener = futureTask.get();
            if (listener != null) {
                // Commit offsets before going away.
                listener.commitOffsets();
                this.zkClient.unsubscribeStateChanges(new ZKSessionExpireListenner(listener));
                final ZKGroupDirs dirs = this.metaZookeeper.new ZKGroupDirs(listener.consumerConfig.getGroup());
                this.zkClient.unsubscribeChildChanges(dirs.consumerRegistryDir, listener);
                log.info("unsubscribeChildChanges:" + dirs.consumerRegistryDir);
                // Stop watching partition changes of every subscribed topic.
                for (final String topic : listener.topicSubcriberRegistry.keySet()) {
                    final String partitionPath = this.metaZookeeper.brokerTopicsSubPath + "/" + topic;
                    this.zkClient.unsubscribeChildChanges(partitionPath, listener);
                    log.info("unsubscribeChildChanges:" + partitionPath);
                }
                // Release ownership of all owned partitions.
                listener.releaseAllPartitionOwnership();
                // Delete the consumer's ephemeral registration node.
                ZkUtils.deletePath(this.zkClient, listener.dirs.consumerRegistryDir + "/"
                        + listener.consumerIdString);
            }
        }
    }
    catch (final InterruptedException e) {
        // BUGFIX: Thread.interrupted() only *clears* the flag; restore the
        // interrupt status for callers instead.
        Thread.currentThread().interrupt();
        log.error("Interrupted when unRegisterConsumer", e);
    }
    catch (final Exception e) {
        log.error("Error in unRegisterConsumer,maybe error when registerConsumer", e);
    }
}
/**
 * Registers the consumer with ZooKeeper and starts load balancing. A fetch
 * manager may be registered only once.
 *
 * @throws MetaClientException when the fetch manager is already registered
 * @throws Exception on registration failure
 */
public void registerConsumer(final ConsumerConfig consumerConfig, final FetchManager fetchManager,
        final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry,
        final OffsetStorage offsetStorage, final LoadBalanceStrategy loadBalanceStrategy) throws Exception {
    final FutureTask<ZKLoadRebalanceListener> task =
            new FutureTask<ZKLoadRebalanceListener>(new Callable<ZKLoadRebalanceListener>() {
                @Override
                public ZKLoadRebalanceListener call() throws Exception {
                    final ZKGroupDirs dirs =
                            ConsumerZooKeeper.this.metaZookeeper.new ZKGroupDirs(consumerConfig.getGroup());
                    final String consumerUUID = ConsumerZooKeeper.this.getConsumerUUID(consumerConfig);
                    final String consumerUUIDString = consumerConfig.getGroup() + "_" + consumerUUID;
                    final ZKLoadRebalanceListener loadBalanceListener =
                            new ZKLoadRebalanceListener(fetchManager, dirs, consumerUUIDString, consumerConfig,
                                    offsetStorage, topicSubcriberRegistry, loadBalanceStrategy);
                    return ConsumerZooKeeper.this.registerConsumerInternal(loadBalanceListener);
                }
            });
    final FutureTask<ZKLoadRebalanceListener> existsTask =
            this.consumerLoadBalanceListeners.putIfAbsent(fetchManager, task);
    if (existsTask == null) {
        task.run();
    }
    else {
        // Fixed typo in the error message ("registed" -> "registered").
        throw new MetaClientException("Consumer has been already registered");
    }
}
/**
 * Performs the actual consumer registration. When no ZooKeeper client is
 * configured the consumer fetches directly from the configured broker;
 * otherwise it registers itself in ZooKeeper, subscribes to topic/consumer
 * change notifications and triggers an initial synchronous rebalance.
 */
protected ZKLoadRebalanceListener registerConsumerInternal(final ZKLoadRebalanceListener loadBalanceListener)
        throws UnknownHostException, InterruptedException, Exception {
    final ZKGroupDirs dirs = this.metaZookeeper.new ZKGroupDirs(loadBalanceListener.consumerConfig.getGroup());
    final String topicString = this.getTopicsString(loadBalanceListener.topicSubcriberRegistry);
    if (this.zkClient == null) {
        // Direct-connection mode: no ZooKeeper available.
        loadBalanceListener.fetchManager.stopFetchRunner();
        loadBalanceListener.fetchManager.resetFetchState();
        // With a null zkClient, build the fetch requests from the consumer config.
        for (final String topic : loadBalanceListener.topicSubcriberRegistry.keySet()) {
            final SubscriberInfo subInfo = loadBalanceListener.topicSubcriberRegistry.get(topic);
            ConcurrentHashMap<Partition, TopicPartitionRegInfo> topicPartRegInfoMap =
                    loadBalanceListener.topicRegistry.get(topic);
            if (topicPartRegInfoMap == null) {
                topicPartRegInfoMap = new ConcurrentHashMap<Partition, TopicPartitionRegInfo>();
                loadBalanceListener.topicRegistry.put(topic, topicPartRegInfoMap);
            }
            final Partition partition = new Partition(loadBalanceListener.consumerConfig.getPartition());
            final long offset = loadBalanceListener.consumerConfig.getOffset();
            final TopicPartitionRegInfo regInfo = new TopicPartitionRegInfo(topic, partition, offset);
            topicPartRegInfoMap.put(partition, regInfo);
            loadBalanceListener.fetchManager.addFetchRequest(new FetchRequest(new Broker(0,
                    loadBalanceListener.consumerConfig.getServerUrl()), 0L, regInfo, subInfo.getMaxSize()));
        }
        loadBalanceListener.fetchManager.startFetchRunner();
    }
    else {
        // Register the consumer id as an ephemeral node.
        ZkUtils.createEphemeralPathExpectConflict(this.zkClient, dirs.consumerRegistryDir + "/"
                + loadBalanceListener.consumerIdString, topicString);
        // Watch membership changes of consumers in the same group.
        this.zkClient.subscribeChildChanges(dirs.consumerRegistryDir, loadBalanceListener);
        // Watch partition changes of every subscribed topic.
        for (final String topic : loadBalanceListener.topicSubcriberRegistry.keySet()) {
            final String partitionPath = this.metaZookeeper.brokerTopicsSubPath + "/" + topic;
            ZkUtils.makeSurePersistentPathExists(this.zkClient, partitionPath);
            this.zkClient.subscribeChildChanges(partitionPath, loadBalanceListener);
        }
        // Re-register automatically when the zk session expires.
        this.zkClient.subscribeStateChanges(new ZKSessionExpireListenner(loadBalanceListener));
        // First registration: force a synchronous rebalance.
        loadBalanceListener.syncedRebalance();
    }
    return loadBalanceListener;
}
/**
 * Joins the subscribed topic names into a comma-separated string, in the
 * iteration order of the registry's key set.
 */
private String getTopicsString(final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry) {
    final StringBuilder builder = new StringBuilder();
    for (final String topic : topicSubcriberRegistry.keySet()) {
        if (builder.length() > 0) {
            builder.append(",");
        }
        builder.append(topic);
    }
    return builder.toString();
}
private final AtomicInteger counter = new AtomicInteger(0);
/**
 * Returns the consumer's UUID: the explicitly configured consumer id when
 * present, otherwise "localAddress-timestamp-counter".
 */
protected String getConsumerUUID(final ConsumerConfig consumerConfig) throws Exception {
    if (consumerConfig.getConsumerId() != null) {
        return consumerConfig.getConsumerId();
    }
    return RemotingUtils.getLocalAddress() + "-" + System.currentTimeMillis() + "-"
            + this.counter.incrementAndGet();
}
@Override
public void onZkClientChanged(final ZkClient newClient) {
    this.zkClient = newClient;
    // Re-register every known consumer against the new zk client.
    for (final FutureTask<ZKLoadRebalanceListener> task : this.consumerLoadBalanceListeners.values()) {
        try {
            final ZKLoadRebalanceListener listener = task.get();
            // Clear all cached registration info first, so a failed
            // re-registration cannot commit stale offsets over newer ones.
            listener.topicRegistry.clear();
            log.info("re-register consumer to zk,group=" + listener.consumerConfig.getGroup());
            this.registerConsumerInternal(listener);
        }
        catch (final Exception e) {
            log.error("reRegister consumer failed", e);
        }
    }
}
/**
 * Listens for ZooKeeper session state changes and re-registers the consumer
 * (and triggers a rebalance) after a session expiration.
 */
class ZKSessionExpireListenner implements IZkStateListener {
    private final String consumerIdString;
    private final ZKLoadRebalanceListener loadBalancerListener;

    public ZKSessionExpireListenner(final ZKLoadRebalanceListener loadBalancerListener) {
        super();
        this.consumerIdString = loadBalancerListener.consumerIdString;
        this.loadBalancerListener = loadBalancerListener;
    }

    @Override
    public void handleNewSession() throws Exception {
        /**
         * When we get a SessionExpired event, we lost all ephemeral nodes
         * and zkclient has reestablished a connection for us. We need to
         * release the ownership of the current consumer and re-register
         * this consumer in the consumer registry and trigger a rebalance.
         */
        // Note: stray empty statements (";") were removed from this method.
        log.info("ZK expired; release old broker parition ownership; re-register consumer " + this.consumerIdString);
        this.loadBalancerListener.resetState();
        ConsumerZooKeeper.this.registerConsumerInternal(this.loadBalancerListener);
        // explicitly trigger load balancing for this consumer
        this.loadBalancerListener.syncedRebalance();
    }

    @Override
    public void handleStateChanged(final KeeperState state) throws Exception {
        // do nothing, since zkclient will do reconnect for us.
    }

    @Override
    public boolean equals(final Object obj) {
        if (!(obj instanceof ZKSessionExpireListenner)) {
            return false;
        }
        final ZKSessionExpireListenner other = (ZKSessionExpireListenner) obj;
        return this.loadBalancerListener.equals(other.loadBalancerListener);
    }

    @Override
    public int hashCode() {
        return this.loadBalancerListener.hashCode();
    }
}
static final Log log = LogFactory.getLog(ConsumerZooKeeper.class);
/**
* Consumer load balance listener for zookeeper. This is a internal class
* for consumer,you should not use it directly in your code.
*
* @author dennis<[email protected]>
*
*/
public class ZKLoadRebalanceListener implements IZkChildListener {
private final ZKGroupDirs dirs;
private final String group;
protected final String consumerIdString;
static final int MAX_N_RETRIES = 10;
private final LoadBalanceStrategy loadBalanceStrategy;
Map<String, List<String>> oldConsumersPerTopicMap = new HashMap<String, List<String>>();
Map<String, List<String>> oldPartitionsPerTopicMap = new HashMap<String, List<String>>();
private final Lock rebalanceLock = new ReentrantLock();
/**
* ĵtopicӦbroker,offsetϢ
*/
final ConcurrentHashMap<String/* topic */, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> topicRegistry =
new ConcurrentHashMap<String, ConcurrentHashMap<Partition, TopicPartitionRegInfo>>();
/**
* ϢСϢ
*/
private final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry;
private final ConsumerConfig consumerConfig;
private final OffsetStorage offsetStorage;
private final FetchManager fetchManager;
Set<Broker> oldBrokerSet = new HashSet<Broker>();
private Cluster oldCluster = new Cluster();
/**
 * Creates a rebalance listener for one consumer registration.
 */
public ZKLoadRebalanceListener(final FetchManager fetchManager, final ZKGroupDirs dirs,
        final String consumerIdString, final ConsumerConfig consumerConfig, final OffsetStorage offsetStorage,
        final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry,
        final LoadBalanceStrategy loadBalanceStrategy) {
    super();
    this.fetchManager = fetchManager;
    this.dirs = dirs;
    this.consumerIdString = consumerIdString;
    this.group = consumerConfig.getGroup();
    this.consumerConfig = consumerConfig;
    this.offsetStorage = offsetStorage;
    this.topicSubcriberRegistry = topicSubcriberRegistry;
    this.loadBalanceStrategy = loadBalanceStrategy;
}
/**
 * Persists the current offsets of all registered topic partitions to the
 * offset storage.
 */
private void commitOffsets() {
    this.offsetStorage.commitOffset(this.consumerConfig.getGroup(), this.getTopicPartitionRegInfos());
}
// Initializes the stored offset for the given partition in the offset
// storage and returns the corresponding in-memory registration info.
private TopicPartitionRegInfo initTopicPartitionRegInfo(final String topic, final String group,
        final Partition partition, final long offset) {
    this.offsetStorage.initOffset(topic, group, partition, offset);
    return new TopicPartitionRegInfo(topic, partition, offset);
}
/**
 * Returns current topic-partitions info.
 *
 * @since 1.4.4
 * @return a snapshot map from topic to the set of partitions currently
 *         owned by this consumer
 */
public Map<String/* topic */, Set<Partition>> getTopicPartitions() {
    Map<String, Set<Partition>> rt = new HashMap<String, Set<Partition>>();
    for (Map.Entry<String, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> entry : this.topicRegistry
            .entrySet()) {
        // Copy the key set so callers cannot mutate (or observe live
        // changes of) the internal registry view.
        rt.put(entry.getKey(), new HashSet<Partition>(entry.getValue().keySet()));
    }
    return rt;
}
/**
 * Collects the registration info of every registered topic partition.
 */
List<TopicPartitionRegInfo> getTopicPartitionRegInfos() {
    final List<TopicPartitionRegInfo> rt = new ArrayList<TopicPartitionRegInfo>();
    for (final ConcurrentHashMap<Partition, TopicPartitionRegInfo> subMap : this.topicRegistry.values()) {
        // Map.values() never returns null, so the old null check was dead code.
        rt.addAll(subMap.values());
    }
    return rt;
}
/**
 * Loads the stored offset info of the given topic partition from the
 * offset storage.
 *
 * @param topic
 * @param partition
 * @return the stored registration info, or null when none exists
 */
private TopicPartitionRegInfo loadTopicPartitionRegInfo(final String topic, final Partition partition) {
    return this.offsetStorage.load(topic, this.consumerConfig.getGroup(), partition);
}
@Override
public void handleChildChange(final String parentPath, final List<String> currentChilds) throws Exception {
    // Any change of group membership or topic partitions triggers a rebalance.
    this.syncedRebalance();
}
/**
 * Runs the rebalance under a lock, retrying up to MAX_N_RETRIES times.
 * Between attempts all partition ownership is released, local state is
 * reset and the thread sleeps to let the ZooKeeper data settle.
 */
void syncedRebalance() throws Exception {
    this.rebalanceLock.lock();
    try {
        for (int i = 0; i < MAX_N_RETRIES; i++) {
            log.info("begin rebalancing consumer " + this.consumerIdString + " try #" + i);
            boolean done;
            try {
                done = this.rebalance();
            }
            catch (final Throwable e) {
                // Unexpected exceptions are retried: other consumers in the
                // group may be rebalancing concurrently, which can make the
                // data read from zk inconsistent.
                log.warn("unexpected exception occured while try rebalancing", e);
                done = false;
            }
            log.info("end rebalancing consumer " + this.consumerIdString + " try #" + i);
            if (done) {
                log.info("rebalance success.");
                return;
            }
            else {
                log.warn("rebalance failed,try #" + i);
            }
            // release all partitions, reset state and retry
            this.releaseAllPartitionOwnership();
            this.resetState();
            // Wait for the zk data to be in sync before retrying.
            Thread.sleep(ConsumerZooKeeper.this.zkConfig.zkSyncTimeMs);
        }
        log.error("rebalance failed,finally");
    }
    finally {
        this.rebalanceLock.unlock();
    }
}
// Clears all locally cached rebalance state: owned partitions and the
// consumer/partition snapshots observed at the last rebalance.
private void resetState() {
    this.topicRegistry.clear();
    this.oldConsumersPerTopicMap.clear();
    this.oldPartitionsPerTopicMap.clear();
}
/**
 * Rebuilds the fetch requests from the current topic registry, connects to
 * newly assigned brokers, closes connections to brokers that are no longer
 * used and restarts the fetch runners.
 *
 * @param cluster current broker cluster view
 */
protected void updateFetchRunner(final Cluster cluster) throws Exception {
    this.fetchManager.resetFetchState();
    final Set<Broker> changedBrokers = new HashSet<Broker>();
    for (final Map.Entry<String/* topic */, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> entry : this.topicRegistry
            .entrySet()) {
        final String topic = entry.getKey();
        for (final Map.Entry<Partition, TopicPartitionRegInfo> partEntry : entry.getValue().entrySet()) {
            final Partition partition = partEntry.getKey();
            final TopicPartitionRegInfo info = partEntry.getValue();
            // Pick master or slave randomly for this broker id.
            final Broker broker = cluster.getBrokerRandom(partition.getBrokerId());
            if (broker != null) {
                changedBrokers.add(broker);
                final SubscriberInfo subscriberInfo = this.topicSubcriberRegistry.get(topic);
                // Queue the fetch request for this partition.
                this.fetchManager.addFetchRequest(new FetchRequest(broker, 0L, info, subscriberInfo
                        .getMaxSize()));
            }
        }
    }
    // Connect to brokers that were not part of the previous assignment.
    for (final Broker broker : changedBrokers) {
        if (!this.oldBrokerSet.contains(broker)) {
            try {
                ConsumerZooKeeper.this.remotingClient.connect(broker.getZKString());
                ConsumerZooKeeper.this.remotingClient.awaitReadyInterrupt(broker.getZKString());
                log.info("Connect to " + broker.getZKString());
            }
            catch (final NotifyRemotingException e) {
                log.error("Connect to " + broker.getZKString() + " failed", e);
            }
            catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    }
    // Close connections to brokers that are no longer assigned.
    for (final Broker broker : this.oldBrokerSet) {
        if (!changedBrokers.contains(broker)) {
            try {
                ConsumerZooKeeper.this.remotingClient.close(broker.getZKString(), false);
                log.info("Closing " + broker.getZKString());
            }
            catch (final NotifyRemotingException e) {
                log.error("Connect to " + broker.getZKString() + " failed", e);
            }
        }
    }
    // Restart the fetch runners against the new broker set.
    log.info("Starting fetch runners");
    this.oldBrokerSet = changedBrokers;
    this.fetchManager.startFetchRunner();
}
/**
 * Performs one rebalance attempt: reads the current consumer and partition
 * views from ZooKeeper, releases partitions no longer assigned to this
 * consumer, claims newly assigned ones and rebuilds the fetch runners.
 *
 * @return true when the rebalance completed; false when it must be retried
 *         (e.g. a concurrent rebalance made the zk view inconsistent, or a
 *         partition is still owned by its previous consumer)
 */
synchronized boolean rebalance() throws Exception {
    final Map<String/* topic */, String/* consumerId */> myConsumerPerTopicMap =
            this.getConsumerPerTopic(this.consumerIdString);
    final Cluster cluster = ConsumerZooKeeper.this.metaZookeeper.getCluster();
    Map<String/* topic */, List<String>/* consumer list */> consumersPerTopicMap = null;
    try {
        consumersPerTopicMap = this.getConsumersPerTopic(this.group);
    }
    catch (final NoNodeException e) {
        // Another consumer of the group may be rebalancing right now.
        log.warn("maybe other consumer is rebalancing now," + e.getMessage());
        return false;
    }
    final Map<String, List<String>> partitionsPerTopicMap =
            this.getPartitionStringsForTopics(myConsumerPerTopicMap);
    final Map<String/* topic */, String/* consumer id */> relevantTopicConsumerIdMap =
            this.getRelevantTopicMap(myConsumerPerTopicMap, partitionsPerTopicMap,
                    this.oldPartitionsPerTopicMap, consumersPerTopicMap, this.oldConsumersPerTopicMap);
    // Nothing changed in consumers/partitions.
    if (relevantTopicConsumerIdMap.size() <= 0) {
        // Even so, the cluster itself may have changed (e.g. master/slave
        // switch), which requires rebuilding the fetch runners.
        if (this.checkClusterChange(cluster)) {
            log.info("Stopping fetch runners,maybe master or slave changed");
            this.fetchManager.stopFetchRunner();
            this.updateFetchRunner(cluster);
            this.oldCluster = cluster;
        }
        else {
            log.info("Consumer " + this.consumerIdString + " with " + consumersPerTopicMap
                    + " doesn't need to be rebalanced.");
        }
        return true;
    }
    log.info("Stopping fetch runners");
    this.fetchManager.stopFetchRunner();
    log.info("Comitting all offsets");
    this.commitOffsets();
    for (final Map.Entry<String, String> entry : relevantTopicConsumerIdMap.entrySet()) {
        final String topic = entry.getKey();
        final String consumerId = entry.getValue();
        final ZKGroupTopicDirs topicDirs =
                ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.group);
        // Current consumers subscribing this topic.
        final List<String> curConsumers = consumersPerTopicMap.get(topic);
        // Current partitions of this topic.
        final List<String> curPartitions = partitionsPerTopicMap.get(topic);
        if (curConsumers == null) {
            log.info("Releasing partition ownerships for topic:" + topic);
            this.releasePartitionOwnership(topic);
            this.topicRegistry.remove(topic);
            log.info("There are no consumers subscribe topic " + topic);
            continue;
        }
        if (curPartitions == null) {
            log.info("Releasing partition ownerships for topic:" + topic);
            this.releasePartitionOwnership(topic);
            this.topicRegistry.remove(topic);
            log.info("There are no partitions under topic " + topic);
            continue;
        }
        // Ask the load balance strategy for this consumer's new partitions.
        final List<String> newParts =
                this.loadBalanceStrategy.getPartitions(topic, consumerId, curConsumers, curPartitions);
        // Compare currently owned partitions with the new assignment.
        ConcurrentHashMap<Partition, TopicPartitionRegInfo> partRegInfos = this.topicRegistry.get(topic);
        if (partRegInfos == null) {
            partRegInfos = new ConcurrentHashMap<Partition, TopicPartitionRegInfo>();
            // BUGFIX: register the same map instance we keep working with.
            // The old code put a second, different empty map into
            // topicRegistry, so the registry and the local variable referred
            // to different maps.
            this.topicRegistry.put(topic, partRegInfos);
        }
        final Set<Partition> currentParts = partRegInfos.keySet();
        for (final Partition partition : currentParts) {
            // Release partitions that are no longer assigned to us.
            if (!newParts.contains(partition.toString())) {
                log.info("Releasing partition ownerships for partition:" + partition);
                partRegInfos.remove(partition);
                this.releasePartitionOwnership(topic, partition);
            }
        }
        for (final String partition : newParts) {
            // Claim partitions that are newly assigned to us.
            if (!currentParts.contains(new Partition(partition))) {
                log.info(consumerId + " attempting to claim partition " + partition);
                // Register the ownership; on conflict the rebalance retries.
                if (!this.processPartition(topicDirs, partition, topic, consumerId)) {
                    return false;
                }
            }
        }
    }
    this.updateFetchRunner(cluster);
    this.oldPartitionsPerTopicMap = partitionsPerTopicMap;
    this.oldConsumersPerTopicMap = consumersPerTopicMap;
    this.oldCluster = cluster;
    return true;
}
// Returns true when the cluster view differs from the one observed during
// the previous rebalance (e.g. a master/slave switch).
protected boolean checkClusterChange(final Cluster cluster) {
    return !this.oldCluster.equals(cluster);
}
// Returns the partition strings of all subscribed topics, keyed by topic.
protected Map<String, List<String>> getPartitionStringsForTopics(final Map<String, String> myConsumerPerTopicMap) {
    return ConsumerZooKeeper.this.metaZookeeper.getPartitionStringsForSubTopics(myConsumerPerTopicMap.keySet());
}
/**
 * Claims ownership of a partition by creating an ephemeral owner node.
 *
 * @param topicDirs group/topic directory layout in ZooKeeper
 * @param partition partition string to claim
 * @param topic subscribed topic
 * @param consumerThreadId this consumer's id, written as the owner value
 * @return true when ownership was claimed; false when another consumer
 *         still owns the partition (caller retries the rebalance)
 */
protected boolean processPartition(final ZKGroupTopicDirs topicDirs, final String partition,
        final String topic, final String consumerThreadId) throws Exception {
    final String partitionOwnerPath = topicDirs.consumerOwnerDir + "/" + partition;
    try {
        ZkUtils.createEphemeralPathExpectConflict(ConsumerZooKeeper.this.zkClient, partitionOwnerPath,
                consumerThreadId);
    }
    catch (final ZkNodeExistsException e) {
        // The previous owner's node has not been deleted yet; retry later.
        log.info("waiting for the partition ownership to be deleted: " + partition);
        return false;
    }
    // Note: a pointless "catch (Exception e) { throw e; }" was removed;
    // other exceptions still propagate unchanged.
    this.addPartitionTopicInfo(topicDirs, partition, topic, consumerThreadId);
    return true;
}
// Loads (or initializes) the offset info of a claimed partition and stores
// it in the local topic registry.
protected void addPartitionTopicInfo(final ZKGroupTopicDirs topicDirs, final String partitionString,
        final String topic, final String consumerThreadId) {
    final Partition partition = new Partition(partitionString);
    final ConcurrentHashMap<Partition, TopicPartitionRegInfo> partitionTopicInfo =
            this.topicRegistry.get(topic);
    TopicPartitionRegInfo existsTopicPartitionRegInfo = this.loadTopicPartitionRegInfo(topic, partition);
    if (existsTopicPartitionRegInfo == null) {
        // No stored offset yet: initialize with the configured start offset.
        existsTopicPartitionRegInfo =
                this.initTopicPartitionRegInfo(topic, consumerThreadId, partition,
                        this.consumerConfig.getOffset());// Long.MAX_VALUE
    }
    partitionTopicInfo.put(partition, existsTopicPartitionRegInfo);
}
/**
 * Releases ownership of every partition this consumer currently owns.
 */
private void releaseAllPartitionOwnership() {
    for (final Map.Entry<String, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> entry : this.topicRegistry
            .entrySet()) {
        final String topic = entry.getKey();
        final ZKGroupTopicDirs topicDirs =
                ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.consumerConfig.getGroup());
        for (final Partition partition : entry.getValue().keySet()) {
            final String znode = topicDirs.consumerOwnerDir + "/" + partition;
            this.deleteOwnership(znode);
        }
    }
}
/**
 * Releases ownership of a single partition of the given topic.
 *
 * @param topic
 * @param partition
 */
private void releasePartitionOwnership(final String topic, final Partition partition) {
    final ZKGroupTopicDirs topicDirs =
            ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.consumerConfig.getGroup());
    final String znode = topicDirs.consumerOwnerDir + "/" + partition;
    this.deleteOwnership(znode);
}
// Deletes an ownership znode, logging (but never propagating) any failure.
private void deleteOwnership(final String znode) {
    try {
        ZkUtils.deletePath(ConsumerZooKeeper.this.zkClient, znode);
    }
    catch (final Throwable t) {
        log.error("exception during releasePartitionOwnership", t);
    }
    if (log.isDebugEnabled()) {
        log.debug("Consumer " + this.consumerIdString + " releasing " + znode);
    }
}
/**
 * Releases ownership of all partitions owned under the given topic.
 *
 * @param topic
 */
private void releasePartitionOwnership(final String topic) {
    final ZKGroupTopicDirs topicDirs =
            ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.consumerConfig.getGroup());
    final ConcurrentHashMap<Partition, TopicPartitionRegInfo> partInfos = this.topicRegistry.get(topic);
    if (partInfos != null) {
        for (final Partition partition : partInfos.keySet()) {
            final String znode = topicDirs.consumerOwnerDir + "/" + partition;
            this.deleteOwnership(znode);
        }
    }
}
/**
 * Determines which subscribed topics need rebalancing: a topic is relevant
 * when its partition list or its consumer list changed since the last
 * rebalance.
 *
 * @param myConsumerPerTopicMap topics subscribed by this consumer
 * @param newPartMap current partitions per topic
 * @param oldPartMap partitions per topic seen at the last rebalance
 * @param newConsumerMap current consumers per topic
 * @param oldConsumerMap consumers per topic seen at the last rebalance
 * @return map from every changed topic to this consumer's id
 */
private Map<String, String> getRelevantTopicMap(final Map<String, String> myConsumerPerTopicMap,
        final Map<String, List<String>> newPartMap, final Map<String, List<String>> oldPartMap,
        final Map<String, List<String>> newConsumerMap, final Map<String, List<String>> oldConsumerMap) {
    final Map<String, String> relevantTopicThreadIdsMap = new HashMap<String, String>();
    for (final Map.Entry<String, String> entry : myConsumerPerTopicMap.entrySet()) {
        final String topic = entry.getKey();
        final String consumerId = entry.getValue();
        // Compare both the partition list and the consumer list.
        if (!this.listEquals(oldPartMap.get(topic), newPartMap.get(topic))
                || !this.listEquals(oldConsumerMap.get(topic), newConsumerMap.get(topic))) {
            relevantTopicThreadIdsMap.put(topic, consumerId);
        }
    }
    return relevantTopicThreadIdsMap;
}
// Null-safe list equality: two nulls are equal, a null never equals a
// non-null list, otherwise defer to List.equals.
private boolean listEquals(final List<String> list1, final List<String> list2) {
    if (list1 == null) {
        return list2 == null;
    }
    return list2 != null && list1.equals(list2);
}
/**
 * Builds the map from topic to the sorted list of consumer ids in the
 * given group, based on the group's consumer registry in ZooKeeper.
 *
 * @param group
 * @return map from topic to sorted list of consumer ids
 * @throws Exception
 * @throws NoNodeException
 *             may be thrown when several consumers of the same group are
 *             rebalancing concurrently and registry nodes disappear
 */
protected Map<String, List<String>> getConsumersPerTopic(final String group) throws Exception, NoNodeException {
    final List<String> consumers =
            ZkUtils.getChildren(ConsumerZooKeeper.this.zkClient, this.dirs.consumerRegistryDir);
    final Map<String, List<String>> consumersPerTopicMap = new HashMap<String, List<String>>();
    for (final String consumer : consumers) {
        final List<String> topics = this.getTopics(consumer);// may throw NoNodeException during concurrent rebalances
        for (final String topic : topics) {
            if (consumersPerTopicMap.get(topic) == null) {
                final List<String> list = new ArrayList<String>();
                list.add(consumer);
                consumersPerTopicMap.put(topic, list);
            }
            else {
                consumersPerTopicMap.get(topic).add(consumer);
            }
        }
    }
    // Sort every consumer list so all group members compute the same order.
    for (final Map.Entry<String, List<String>> entry : consumersPerTopicMap.entrySet()) {
        Collections.sort(entry.getValue());
    }
    return consumersPerTopicMap;
}
/**
 * Maps every topic subscribed by the given consumer id to that id.
 */
public Map<String, String> getConsumerPerTopic(final String consumerId) throws Exception {
    final Map<String/* topic */, String/* consumerId */> result = new HashMap<String, String>();
    for (final String topic : this.getTopics(consumerId)) {
        result.put(topic, consumerId);
    }
    return result;
}
/**
 * Reads the comma-separated topic list registered under the given consumer
 * id and returns it as a list.
 *
 * @param consumerId
 * @return list of subscribed topic names
 * @throws Exception
 */
protected List<String> getTopics(final String consumerId) throws Exception {
    final String topicsString =
            ZkUtils.readData(ConsumerZooKeeper.this.zkClient, this.dirs.consumerRegistryDir + "/" + consumerId);
    final List<String> result = new ArrayList<String>();
    for (final String topic : topicsString.split(",")) {
        result.add(topic);
    }
    return result;
}
}
} | metamorphosis-client/src/main/java/com/taobao/metamorphosis/client/consumer/ConsumerZooKeeper.java | /*
* (C) 2007-2012 Alibaba Group Holding Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* Authors:
* wuhua <[email protected]> , boyan <[email protected]>
*/
package com.taobao.metamorphosis.client.consumer;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.FutureTask;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.I0Itec.zkclient.IZkChildListener;
import org.I0Itec.zkclient.IZkStateListener;
import org.I0Itec.zkclient.ZkClient;
import org.I0Itec.zkclient.exception.ZkNodeExistsException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.zookeeper.KeeperException.NoNodeException;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import com.taobao.gecko.service.exception.NotifyRemotingException;
import com.taobao.metamorphosis.client.RemotingClientWrapper;
import com.taobao.metamorphosis.client.ZkClientChangedListener;
import com.taobao.metamorphosis.client.consumer.storage.OffsetStorage;
import com.taobao.metamorphosis.cluster.Broker;
import com.taobao.metamorphosis.cluster.Cluster;
import com.taobao.metamorphosis.cluster.Partition;
import com.taobao.metamorphosis.exception.MetaClientException;
import com.taobao.metamorphosis.network.RemotingUtils;
import com.taobao.metamorphosis.utils.MetaZookeeper;
import com.taobao.metamorphosis.utils.MetaZookeeper.ZKGroupDirs;
import com.taobao.metamorphosis.utils.MetaZookeeper.ZKGroupTopicDirs;
import com.taobao.metamorphosis.utils.ZkUtils;
import com.taobao.metamorphosis.utils.ZkUtils.ZKConfig;
/**
* ConsumerZookeeper
*
* @author boyan
* @Date 2011-4-26
* @author wuhua
* @Date 2011-6-26
*/
public class ConsumerZooKeeper implements ZkClientChangedListener {
protected ZkClient zkClient;
protected final ConcurrentHashMap<FetchManager, FutureTask<ZKLoadRebalanceListener>> consumerLoadBalanceListeners =
new ConcurrentHashMap<FetchManager, FutureTask<ZKLoadRebalanceListener>>();
private final RemotingClientWrapper remotingClient;
private final ZKConfig zkConfig;
protected final MetaZookeeper metaZookeeper;
    /**
     * Creates a consumer-side ZooKeeper coordinator.
     *
     * @param metaZookeeper helper exposing the meta ZK path layout and cluster lookups
     * @param remotingClient client used to open and close broker connections
     * @param zkClient ZooKeeper client; a null client switches registration into
     *            direct-connect mode (see registerConsumerInternal)
     * @param zkConfig ZooKeeper-related configuration (sync wait times etc.)
     */
    public ConsumerZooKeeper(final MetaZookeeper metaZookeeper, final RemotingClientWrapper remotingClient,
            final ZkClient zkClient, final ZKConfig zkConfig) {
        super();
        this.metaZookeeper = metaZookeeper;
        this.zkClient = zkClient;
        this.remotingClient = remotingClient;
        this.zkConfig = zkConfig;
    }
public void commitOffsets(final FetchManager fetchManager) {
final ZKLoadRebalanceListener listener = this.getBrokerConnectionListener(fetchManager);
if (listener != null) {
listener.commitOffsets();
}
}
public ZKLoadRebalanceListener getBrokerConnectionListener(final FetchManager fetchManager) {
final FutureTask<ZKLoadRebalanceListener> task = this.consumerLoadBalanceListeners.get(fetchManager);
if (task != null) {
try {
return task.get();
}
catch (final Exception e) {
log.error("ȡZKLoadRebalanceListenerʧ", e);
return null;
}
}
else {
return null;
}
}
/**
* ȡעconsumer
*
* @param fetchManager
*/
public void unRegisterConsumer(final FetchManager fetchManager) {
try {
final FutureTask<ZKLoadRebalanceListener> futureTask =
this.consumerLoadBalanceListeners.remove(fetchManager);
if (futureTask != null) {
final ZKLoadRebalanceListener listener = futureTask.get();
if (listener != null) {
// ύoffsets
listener.commitOffsets();
this.zkClient.unsubscribeStateChanges(new ZKSessionExpireListenner(listener));
final ZKGroupDirs dirs = this.metaZookeeper.new ZKGroupDirs(listener.consumerConfig.getGroup());
this.zkClient.unsubscribeChildChanges(dirs.consumerRegistryDir, listener);
log.info("unsubscribeChildChanges:" + dirs.consumerRegistryDir);
// ƳӶtopicķ仯
for (final String topic : listener.topicSubcriberRegistry.keySet()) {
final String partitionPath = this.metaZookeeper.brokerTopicsSubPath + "/" + topic;
this.zkClient.unsubscribeChildChanges(partitionPath, listener);
log.info("unsubscribeChildChanges:" + partitionPath);
}
// ɾownership
listener.releaseAllPartitionOwnership();
// ɾʱڵ
ZkUtils.deletePath(this.zkClient, listener.dirs.consumerRegistryDir + "/"
+ listener.consumerIdString);
}
}
}
catch (final InterruptedException e) {
Thread.interrupted();
log.error("Interrupted when unRegisterConsumer", e);
}
catch (final Exception e) {
log.error("Error in unRegisterConsumer,maybe error when registerConsumer", e);
}
}
/**
* עᶩ
*
* @throws Exception
*/
public void registerConsumer(final ConsumerConfig consumerConfig, final FetchManager fetchManager,
final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry,
final OffsetStorage offsetStorage, final LoadBalanceStrategy loadBalanceStrategy) throws Exception {
final FutureTask<ZKLoadRebalanceListener> task =
new FutureTask<ZKLoadRebalanceListener>(new Callable<ZKLoadRebalanceListener>() {
@Override
public ZKLoadRebalanceListener call() throws Exception {
final ZKGroupDirs dirs =
ConsumerZooKeeper.this.metaZookeeper.new ZKGroupDirs(consumerConfig.getGroup());
final String consumerUUID = ConsumerZooKeeper.this.getConsumerUUID(consumerConfig);
final String consumerUUIDString = consumerConfig.getGroup() + "_" + consumerUUID;
final ZKLoadRebalanceListener loadBalanceListener =
new ZKLoadRebalanceListener(fetchManager, dirs, consumerUUIDString, consumerConfig,
offsetStorage, topicSubcriberRegistry, loadBalanceStrategy);
return ConsumerZooKeeper.this.registerConsumerInternal(loadBalanceListener);
}
});
final FutureTask<ZKLoadRebalanceListener> existsTask =
this.consumerLoadBalanceListeners.putIfAbsent(fetchManager, task);
if (existsTask == null) {
task.run();
}
else {
throw new MetaClientException("Consumer has been already registed");
}
}
    /**
     * Performs the actual registration. With a null zkClient the consumer runs in
     * direct-connect mode: fetch requests are built straight from the configured
     * server URL/partition/offset. Otherwise the consumer id is registered as an
     * ephemeral ZK node, watchers are installed and a first synchronous rebalance
     * is executed.
     *
     * @param loadBalanceListener the listener carrying all registration state
     * @return the same listener, for chaining by callers
     */
    protected ZKLoadRebalanceListener registerConsumerInternal(final ZKLoadRebalanceListener loadBalanceListener)
            throws UnknownHostException, InterruptedException, Exception {
        final ZKGroupDirs dirs = this.metaZookeeper.new ZKGroupDirs(loadBalanceListener.consumerConfig.getGroup());
        final String topicString = this.getTopicsString(loadBalanceListener.topicSubcriberRegistry);
        if (this.zkClient == null) {
            // Direct-connect mode: no ZooKeeper available.
            loadBalanceListener.fetchManager.stopFetchRunner();
            loadBalanceListener.fetchManager.resetFetchState();
            // With zkClient == null, build fetch requests directly from the consumer config.
            for (final String topic : loadBalanceListener.topicSubcriberRegistry.keySet()) {
                final SubscriberInfo subInfo = loadBalanceListener.topicSubcriberRegistry.get(topic);
                ConcurrentHashMap<Partition, TopicPartitionRegInfo> topicPartRegInfoMap =
                        loadBalanceListener.topicRegistry.get(topic);
                if (topicPartRegInfoMap == null) {
                    topicPartRegInfoMap = new ConcurrentHashMap<Partition, TopicPartitionRegInfo>();
                    loadBalanceListener.topicRegistry.put(topic, topicPartRegInfoMap);
                }
                final Partition partition = new Partition(loadBalanceListener.consumerConfig.getPartition());
                final long offset = loadBalanceListener.consumerConfig.getOffset();
                final TopicPartitionRegInfo regInfo = new TopicPartitionRegInfo(topic, partition, offset);
                topicPartRegInfoMap.put(partition, regInfo);
                // Broker id 0 with the configured server URL; fetch starts at delay 0.
                loadBalanceListener.fetchManager.addFetchRequest(new FetchRequest(new Broker(0,
                        loadBalanceListener.consumerConfig.getServerUrl()), 0L, regInfo, subInfo.getMaxSize()));
            }
            loadBalanceListener.fetchManager.startFetchRunner();
        }
        else {
            // Register the consumer id as an ephemeral node (conflict => retry inside ZkUtils).
            ZkUtils.createEphemeralPathExpectConflict(this.zkClient, dirs.consumerRegistryDir + "/"
                    + loadBalanceListener.consumerIdString, topicString);
            // Watch membership changes of the same consumer group.
            this.zkClient.subscribeChildChanges(dirs.consumerRegistryDir, loadBalanceListener);
            // Watch partition changes of every subscribed topic.
            for (final String topic : loadBalanceListener.topicSubcriberRegistry.keySet()) {
                final String partitionPath = this.metaZookeeper.brokerTopicsSubPath + "/" + topic;
                ZkUtils.makeSurePersistentPathExists(this.zkClient, partitionPath);
                this.zkClient.subscribeChildChanges(partitionPath, loadBalanceListener);
            }
            // Re-register automatically when the ZK session expires.
            this.zkClient.subscribeStateChanges(new ZKSessionExpireListenner(loadBalanceListener));
            // First registration: run the rebalance synchronously so consumption
            // can start immediately.
            loadBalanceListener.syncedRebalance();
        }
        return loadBalanceListener;
    }
private String getTopicsString(final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry) {
final StringBuilder topicSb = new StringBuilder();
boolean wasFirst = true;
for (final String topic : topicSubcriberRegistry.keySet()) {
if (wasFirst) {
wasFirst = false;
topicSb.append(topic);
}
else {
topicSb.append(",").append(topic);
}
}
return topicSb.toString();
}
private final AtomicInteger counter = new AtomicInteger(0);
protected String getConsumerUUID(final ConsumerConfig consumerConfig) throws Exception {
String consumerUUID = null;
if (consumerConfig.getConsumerId() != null) {
consumerUUID = consumerConfig.getConsumerId();
}
else {
consumerUUID =
RemotingUtils.getLocalAddress() + "-" + System.currentTimeMillis() + "-"
+ this.counter.incrementAndGet();
}
return consumerUUID;
}
    /**
     * Called when the underlying ZkClient is replaced (e.g. after reconnect):
     * swaps the client and re-registers every known consumer.
     */
    @Override
    public void onZkClientChanged(final ZkClient newClient) {
        this.zkClient = newClient;
        // Re-register all consumers with the new client.
        for (final FutureTask<ZKLoadRebalanceListener> task : this.consumerLoadBalanceListeners.values()) {
            try {
                final ZKLoadRebalanceListener listener = task.get();
                // Clear registered topic info first so a failed re-registration cannot
                // commit stale offsets and overwrite newer ones (per the original,
                // garbled comment — intent inferred; confirm against history).
                listener.topicRegistry.clear();
                log.info("re-register consumer to zk,group=" + listener.consumerConfig.getGroup());
                this.registerConsumerInternal(listener);
            }
            catch (final Exception e) {
                log.error("reRegister consumer failed", e);
            }
        }
    }
class ZKSessionExpireListenner implements IZkStateListener {
private final String consumerIdString;
private final ZKLoadRebalanceListener loadBalancerListener;
public ZKSessionExpireListenner(final ZKLoadRebalanceListener loadBalancerListener) {
super();
this.consumerIdString = loadBalancerListener.consumerIdString;
this.loadBalancerListener = loadBalancerListener;
}
@Override
public void handleNewSession() throws Exception {
/**
* When we get a SessionExpired event, we lost all ephemeral nodes
* and zkclient has reestablished a connection for us. We need to
* release the ownership of the current consumer and re-register
* this consumer in the consumer registry and trigger a rebalance.
*/
;
log.info("ZK expired; release old broker parition ownership; re-register consumer " + this.consumerIdString);
this.loadBalancerListener.resetState();
ConsumerZooKeeper.this.registerConsumerInternal(this.loadBalancerListener);
;
// explicitly trigger load balancing for this consumer
this.loadBalancerListener.syncedRebalance();
}
@Override
public void handleStateChanged(final KeeperState state) throws Exception {
// do nothing, since zkclient will do reconnect for us.
}
@Override
public boolean equals(final Object obj) {
if (!(obj instanceof ZKSessionExpireListenner)) {
return false;
}
final ZKSessionExpireListenner other = (ZKSessionExpireListenner) obj;
return this.loadBalancerListener.equals(other.loadBalancerListener);
}
@Override
public int hashCode() {
return this.loadBalancerListener.hashCode();
}
}
static final Log log = LogFactory.getLog(ConsumerZooKeeper.class);
/**
* Consumer load balance listener for zookeeper. This is a internal class
* for consumer,you should not use it directly in your code.
*
* @author dennis<[email protected]>
*
*/
public class ZKLoadRebalanceListener implements IZkChildListener {
private final ZKGroupDirs dirs;
private final String group;
protected final String consumerIdString;
static final int MAX_N_RETRIES = 10;
private final LoadBalanceStrategy loadBalanceStrategy;
Map<String, List<String>> oldConsumersPerTopicMap = new HashMap<String, List<String>>();
Map<String, List<String>> oldPartitionsPerTopicMap = new HashMap<String, List<String>>();
private final Lock rebalanceLock = new ReentrantLock();
/**
* ĵtopicӦbroker,offsetϢ
*/
final ConcurrentHashMap<String/* topic */, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> topicRegistry =
new ConcurrentHashMap<String, ConcurrentHashMap<Partition, TopicPartitionRegInfo>>();
/**
* ϢСϢ
*/
private final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry;
private final ConsumerConfig consumerConfig;
private final OffsetStorage offsetStorage;
private final FetchManager fetchManager;
Set<Broker> oldBrokerSet = new HashSet<Broker>();
private Cluster oldCluster = new Cluster();
        /**
         * Creates a rebalance listener bound to one consumer registration.
         *
         * @param fetchManager manages the fetch runner threads for this consumer
         * @param dirs ZK directory layout of the consumer group
         * @param consumerIdString unique id of this consumer ("group_uuid")
         * @param consumerConfig the consumer's configuration
         * @param offsetStorage storage for loading/committing offsets
         * @param topicSubcriberRegistry per-topic subscription info
         * @param loadBalanceStrategy strategy assigning partitions to consumers
         */
        public ZKLoadRebalanceListener(final FetchManager fetchManager, final ZKGroupDirs dirs,
                final String consumerIdString, final ConsumerConfig consumerConfig, final OffsetStorage offsetStorage,
                final ConcurrentHashMap<String/* topic */, SubscriberInfo> topicSubcriberRegistry,
                final LoadBalanceStrategy loadBalanceStrategy) {
            super();
            this.fetchManager = fetchManager;
            this.dirs = dirs;
            this.consumerIdString = consumerIdString;
            this.group = consumerConfig.getGroup();
            this.consumerConfig = consumerConfig;
            this.offsetStorage = offsetStorage;
            this.topicSubcriberRegistry = topicSubcriberRegistry;
            this.loadBalanceStrategy = loadBalanceStrategy;
        }
        /**
         * Persists the current offsets of every registered partition through the
         * configured offset storage.
         */
        private void commitOffsets() {
            this.offsetStorage.commitOffset(this.consumerConfig.getGroup(), this.getTopicPartitionRegInfos());
        }
        /**
         * Initializes the stored offset for a partition and returns a fresh
         * registration info object carrying that offset.
         */
        private TopicPartitionRegInfo initTopicPartitionRegInfo(final String topic, final String group,
                final Partition partition, final long offset) {
            this.offsetStorage.initOffset(topic, group, partition, offset);
            return new TopicPartitionRegInfo(topic, partition, offset);
        }
/**
* Returns current topic-partitions info.
*
* @since 1.4.4
* @return
*/
public Map<String/* topic */, Set<Partition>> getTopicPartitions() {
Map<String, Set<Partition>> rt = new HashMap<String, Set<Partition>>();
for (Map.Entry<String, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> entry : this.topicRegistry
.entrySet()) {
rt.put(entry.getKey(), entry.getValue().keySet());
}
return rt;
}
List<TopicPartitionRegInfo> getTopicPartitionRegInfos() {
final List<TopicPartitionRegInfo> rt = new ArrayList<TopicPartitionRegInfo>();
for (final ConcurrentHashMap<Partition, TopicPartitionRegInfo> subMap : this.topicRegistry.values()) {
final Collection<TopicPartitionRegInfo> values = subMap.values();
if (values != null) {
rt.addAll(values);
}
}
return rt;
}
        /**
         * Loads the persisted offset info for a topic/partition from offset storage.
         *
         * @param topic the topic name
         * @param partition the partition to look up
         * @return the stored registration info, or null when none was persisted
         */
        private TopicPartitionRegInfo loadTopicPartitionRegInfo(final String topic, final Partition partition) {
            return this.offsetStorage.load(topic, this.consumerConfig.getGroup(), partition);
        }
        // ZK child-change callback (group membership or topic partitions changed):
        // always answer with a full synchronized rebalance.
        @Override
        public void handleChildChange(final String parentPath, final List<String> currentChilds) throws Exception {
            this.syncedRebalance();
        }
        /**
         * Runs {@link #rebalance()} under a lock, retrying up to MAX_N_RETRIES
         * times. Between attempts all partition ownership is released, local state
         * is reset and the thread sleeps for the configured ZK sync time so
         * concurrent rebalances in the group can settle.
         */
        void syncedRebalance() throws Exception {
            this.rebalanceLock.lock();
            try {
                for (int i = 0; i < MAX_N_RETRIES; i++) {
                    log.info("begin rebalancing consumer " + this.consumerIdString + " try #" + i);
                    boolean done;
                    try {
                        done = this.rebalance();
                    }
                    catch (final Throwable e) {
                        // Unexpected failures are treated as "retry": several consumers
                        // rebalancing at once can make the ZK reads inconsistent.
                        log.warn("unexpected exception occured while try rebalancing", e);
                        done = false;
                    }
                    log.info("end rebalancing consumer " + this.consumerIdString + " try #" + i);
                    if (done) {
                        log.info("rebalance success.");
                        return;
                    }
                    else {
                        log.warn("rebalance failed,try #" + i);
                    }
                    // release all partitions, reset state and retry
                    this.releaseAllPartitionOwnership();
                    this.resetState();
                    // Wait for ZooKeeper to propagate the changes.
                    Thread.sleep(ConsumerZooKeeper.this.zkConfig.zkSyncTimeMs);
                }
                log.error("rebalance failed,finally");
            }
            finally {
                this.rebalanceLock.unlock();
            }
        }
        // Drops all cached rebalance state so the next attempt recomputes
        // everything from ZooKeeper.
        private void resetState() {
            this.topicRegistry.clear();
            this.oldConsumersPerTopicMap.clear();
            this.oldPartitionsPerTopicMap.clear();
        }
        /**
         * Rebuilds the fetch requests from the current registry, connects to any
         * newly assigned brokers, closes connections to brokers no longer needed
         * and restarts the fetch runner threads.
         *
         * @param cluster the current broker cluster view
         */
        protected void updateFetchRunner(final Cluster cluster) throws Exception {
            this.fetchManager.resetFetchState();
            final Set<Broker> changedBrokers = new HashSet<Broker>();
            for (final Map.Entry<String/* topic */, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> entry : this.topicRegistry
                    .entrySet()) {
                final String topic = entry.getKey();
                for (final Map.Entry<Partition, TopicPartitionRegInfo> partEntry : entry.getValue().entrySet()) {
                    final Partition partition = partEntry.getKey();
                    final TopicPartitionRegInfo info = partEntry.getValue();
                    // Pick master or slave at random for this broker id.
                    final Broker broker = cluster.getBrokerRandom(partition.getBrokerId());
                    if (broker != null) {
                        changedBrokers.add(broker);
                        final SubscriberInfo subscriberInfo = this.topicSubcriberRegistry.get(topic);
                        // Queue a fetch request for this partition.
                        this.fetchManager.addFetchRequest(new FetchRequest(broker, 0L, info, subscriberInfo
                                .getMaxSize()));
                    }
                }
            }
            // Connect to brokers that are newly assigned.
            for (final Broker broker : changedBrokers) {
                if (!this.oldBrokerSet.contains(broker)) {
                    try {
                        ConsumerZooKeeper.this.remotingClient.connect(broker.getZKString());
                        ConsumerZooKeeper.this.remotingClient.awaitReadyInterrupt(broker.getZKString());
                        log.info("Connect to " + broker.getZKString());
                    }
                    catch (final NotifyRemotingException e) {
                        log.error("Connect to " + broker.getZKString() + " failed", e);
                    }
                    catch (final InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }
            }
            // Close connections to brokers that are no longer assigned.
            for (final Broker broker : this.oldBrokerSet) {
                if (!changedBrokers.contains(broker)) {
                    try {
                        ConsumerZooKeeper.this.remotingClient.close(broker.getZKString(), false);
                        log.info("Closing " + broker.getZKString());
                    }
                    catch (final NotifyRemotingException e) {
                        log.error("Connect to " + broker.getZKString() + " failed", e);
                    }
                }
            }
            // Restart the fetch runner threads.
            log.info("Starting fetch runners");
            this.oldBrokerSet = changedBrokers;
            this.fetchManager.startFetchRunner();
        }
boolean rebalance() throws Exception {
final Map<String/* topic */, String/* consumerId */> myConsumerPerTopicMap =
this.getConsumerPerTopic(this.consumerIdString);
final Cluster cluster = ConsumerZooKeeper.this.metaZookeeper.getCluster();
Map<String/* topic */, List<String>/* consumer list */> consumersPerTopicMap = null;
try {
consumersPerTopicMap = this.getConsumersPerTopic(this.group);
}
catch (final NoNodeException e) {
// consumerͬʱڸؾʱ,ܻᵽ -- wuhua
log.warn("maybe other consumer is rebalancing now," + e.getMessage());
return false;
}
final Map<String, List<String>> partitionsPerTopicMap =
this.getPartitionStringsForTopics(myConsumerPerTopicMap);
final Map<String/* topic */, String/* consumer id */> relevantTopicConsumerIdMap =
this.getRelevantTopicMap(myConsumerPerTopicMap, partitionsPerTopicMap,
this.oldPartitionsPerTopicMap, consumersPerTopicMap, this.oldConsumersPerTopicMap);
// ûбƽ
if (relevantTopicConsumerIdMap.size() <= 0) {
// ,topicûб仯,һ̨,
// partitionsPerTopicMapûб仯,
// Ҫ鼯Ⱥı仯
if (this.checkClusterChange(cluster)) {
log.info("Stopping fetch runners,maybe master or slave changed");
this.fetchManager.stopFetchRunner();
this.updateFetchRunner(cluster);
this.oldCluster = cluster;
}
else {
log.info("Consumer " + this.consumerIdString + " with " + consumersPerTopicMap
+ " doesn't need to be rebalanced.");
}
return true;
}
log.info("Stopping fetch runners");
this.fetchManager.stopFetchRunner();
log.info("Comitting all offsets");
this.commitOffsets();
for (final Map.Entry<String, String> entry : relevantTopicConsumerIdMap.entrySet()) {
final String topic = entry.getKey();
final String consumerId = entry.getValue();
final ZKGroupTopicDirs topicDirs =
ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.group);
// ǰtopicĶ
final List<String> curConsumers = consumersPerTopicMap.get(topic);
// ǰtopicķ
final List<String> curPartitions = partitionsPerTopicMap.get(topic);
if (curConsumers == null) {
log.info("Releasing partition ownerships for topic:" + topic);
this.releasePartitionOwnership(topic);
this.topicRegistry.remove(topic);
log.info("There are no consumers subscribe topic " + topic);
continue;
}
if (curPartitions == null) {
log.info("Releasing partition ownerships for topic:" + topic);
this.releasePartitionOwnership(topic);
this.topicRegistry.remove(topic);
log.info("There are no partitions under topic " + topic);
continue;
}
// ݸؾԻȡconsumerӦpartitionб
final List<String> newParts =
this.loadBalanceStrategy.getPartitions(topic, consumerId, curConsumers, curPartitions);
// 鿴ǰtopicķб鿴Ƿб
ConcurrentHashMap<Partition, TopicPartitionRegInfo> partRegInfos = this.topicRegistry.get(topic);
if (partRegInfos == null) {
partRegInfos = new ConcurrentHashMap<Partition, TopicPartitionRegInfo>();
this.topicRegistry.put(topic, new ConcurrentHashMap<Partition, TopicPartitionRegInfo>());
}
final Set<Partition> currentParts = partRegInfos.keySet();
for (final Partition partition : currentParts) {
// µķбвڵķҪͷownerShipҲϵУµû
if (!newParts.contains(partition.toString())) {
log.info("Releasing partition ownerships for partition:" + partition);
partRegInfos.remove(partition);
this.releasePartitionOwnership(topic, partition);
}
}
for (final String partition : newParts) {
// ǰûеķȥҲµУϵû
if (!currentParts.contains(new Partition(partition))) {
log.info(consumerId + " attempting to claim partition " + partition);
// עownerϵ
if (!this.processPartition(topicDirs, partition, topic, consumerId)) {
return false;
}
}
}
}
this.updateFetchRunner(cluster);
this.oldPartitionsPerTopicMap = partitionsPerTopicMap;
this.oldConsumersPerTopicMap = consumersPerTopicMap;
this.oldCluster = cluster;
return true;
}
        // True when the broker cluster view differs from the one seen during the
        // last successful rebalance (e.g. a master/slave switch).
        protected boolean checkClusterChange(final Cluster cluster) {
            return !this.oldCluster.equals(cluster);
        }
        // Looks up, per subscribed topic, the list of partition strings currently
        // registered under the brokers in ZooKeeper.
        protected Map<String, List<String>> getPartitionStringsForTopics(final Map<String, String> myConsumerPerTopicMap) {
            return ConsumerZooKeeper.this.metaZookeeper.getPartitionStringsForSubTopics(myConsumerPerTopicMap.keySet());
        }
/**
* ӷownerϵ
*
* @param topicDirs
* @param partition
* @param topic
* @param consumerThreadId
* @return
*/
protected boolean processPartition(final ZKGroupTopicDirs topicDirs, final String partition,
final String topic, final String consumerThreadId) throws Exception {
final String partitionOwnerPath = topicDirs.consumerOwnerDir + "/" + partition;
try {
ZkUtils.createEphemeralPathExpectConflict(ConsumerZooKeeper.this.zkClient, partitionOwnerPath,
consumerThreadId);
}
catch (final ZkNodeExistsException e) {
// ԭʼĹϵӦѾɾԺ
log.info("waiting for the partition ownership to be deleted: " + partition);
return false;
}
catch (final Exception e) {
throw e;
}
this.addPartitionTopicInfo(topicDirs, partition, topic, consumerThreadId);
return true;
}
        // Loads (or initializes) the partition's offset info and stores it in the
        // local registry.
        protected void addPartitionTopicInfo(final ZKGroupTopicDirs topicDirs, final String partitionString,
                final String topic, final String consumerThreadId) {
            final Partition partition = new Partition(partitionString);
            // NOTE(review): assumes topicRegistry already has an entry for this
            // topic (rebalance() creates it before calling here); a missing entry
            // would NPE below — confirm all call paths.
            final ConcurrentHashMap<Partition, TopicPartitionRegInfo> partitionTopicInfo =
                    this.topicRegistry.get(topic);
            TopicPartitionRegInfo existsTopicPartitionRegInfo = this.loadTopicPartitionRegInfo(topic, partition);
            if (existsTopicPartitionRegInfo == null) {
                // No stored offset yet: initialize with the configured offset.
                // Original comment flagged this default as possibly unreasonable (TODO).
                existsTopicPartitionRegInfo =
                        this.initTopicPartitionRegInfo(topic, consumerThreadId, partition,
                            this.consumerConfig.getOffset());// Long.MAX_VALUE
            }
            partitionTopicInfo.put(partition, existsTopicPartitionRegInfo);
        }
        /**
         * Deletes the ZK ownership node of every partition this consumer currently
         * has registered, across all topics.
         */
        private void releaseAllPartitionOwnership() {
            for (final Map.Entry<String, ConcurrentHashMap<Partition, TopicPartitionRegInfo>> entry : this.topicRegistry
                    .entrySet()) {
                final String topic = entry.getKey();
                final ZKGroupTopicDirs topicDirs =
                        ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.consumerConfig.getGroup());
                for (final Partition partition : entry.getValue().keySet()) {
                    final String znode = topicDirs.consumerOwnerDir + "/" + partition;
                    this.deleteOwnership(znode);
                }
            }
        }
        /**
         * Deletes the ZK ownership node of a single partition.
         *
         * @param topic the topic the partition belongs to
         * @param partition the partition whose ownership is released
         */
        private void releasePartitionOwnership(final String topic, final Partition partition) {
            final ZKGroupTopicDirs topicDirs =
                    ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.consumerConfig.getGroup());
            final String znode = topicDirs.consumerOwnerDir + "/" + partition;
            this.deleteOwnership(znode);
        }
        // Best-effort deletion of an ownership znode; failures are logged but
        // never propagated so a rebalance round can continue.
        private void deleteOwnership(final String znode) {
            try {
                ZkUtils.deletePath(ConsumerZooKeeper.this.zkClient, znode);
            }
            catch (final Throwable t) {
                log.error("exception during releasePartitionOwnership", t);
            }
            if (log.isDebugEnabled()) {
                log.debug("Consumer " + this.consumerIdString + " releasing " + znode);
            }
        }
        /**
         * Deletes the ZK ownership node of every partition registered under the
         * given topic. (Original javadoc listed a nonexistent "partition" param.)
         *
         * @param topic the topic whose partition ownership nodes are deleted
         */
        private void releasePartitionOwnership(final String topic) {
            final ZKGroupTopicDirs topicDirs =
                    ConsumerZooKeeper.this.metaZookeeper.new ZKGroupTopicDirs(topic, this.consumerConfig.getGroup());
            final ConcurrentHashMap<Partition, TopicPartitionRegInfo> partInfos = this.topicRegistry.get(topic);
            if (partInfos != null) {
                for (final Partition partition : partInfos.keySet()) {
                    final String znode = topicDirs.consumerOwnerDir + "/" + partition;
                    this.deleteOwnership(znode);
                }
            }
        }
        /**
         * Determines which of this consumer's topics actually need rebalancing: a
         * topic is relevant when either its partition list or its consumer list
         * differs from the snapshot taken during the previous rebalance.
         *
         * @param myConsumerPerTopicMap topics this consumer subscribes, mapped to its id
         * @param newPartMap current partitions per topic
         * @param oldPartMap partitions per topic at the last rebalance
         * @param newConsumerMap current consumers per topic
         * @param oldConsumerMap consumers per topic at the last rebalance
         * @return topics that changed, mapped to this consumer's id
         */
        private Map<String, String> getRelevantTopicMap(final Map<String, String> myConsumerPerTopicMap,
                final Map<String, List<String>> newPartMap, final Map<String, List<String>> oldPartMap,
                final Map<String, List<String>> newConsumerMap, final Map<String, List<String>> oldConsumerMap) {
            final Map<String, String> relevantTopicThreadIdsMap = new HashMap<String, String>();
            for (final Map.Entry<String, String> entry : myConsumerPerTopicMap.entrySet()) {
                final String topic = entry.getKey();
                final String consumerId = entry.getValue();
                // Did either the partition list or the consumer list change?
                if (!this.listEquals(oldPartMap.get(topic), newPartMap.get(topic))
                        || !this.listEquals(oldConsumerMap.get(topic), newConsumerMap.get(topic))) {
                    relevantTopicThreadIdsMap.put(topic, consumerId);
                }
            }
            return relevantTopicThreadIdsMap;
        }
private boolean listEquals(final List<String> list1, final List<String> list2) {
if (list1 == null && list2 != null) {
return false;
}
if (list1 != null && list2 == null) {
return false;
}
if (list1 == null && list2 == null) {
return true;
}
return list1.equals(list2);
}
        /**
         * Builds, for the given group, a map from each subscribed topic to the
         * sorted list of consumer ids subscribing to it, by reading every
         * consumer's registry node.
         *
         * @param group the consumer group name
         * @return topic -> sorted consumer id list
         * @throws NoNodeException when another consumer in the group is
         *             rebalancing concurrently and a registry node disappears
         */
        protected Map<String, List<String>> getConsumersPerTopic(final String group) throws Exception, NoNodeException {
            final List<String> consumers =
                    ZkUtils.getChildren(ConsumerZooKeeper.this.zkClient, this.dirs.consumerRegistryDir);
            final Map<String, List<String>> consumersPerTopicMap = new HashMap<String, List<String>>();
            for (final String consumer : consumers) {
                // May throw NoNodeException when a concurrent rebalance removes the node.
                final List<String> topics = this.getTopics(consumer);
                for (final String topic : topics) {
                    if (consumersPerTopicMap.get(topic) == null) {
                        final List<String> list = new ArrayList<String>();
                        list.add(consumer);
                        consumersPerTopicMap.put(topic, list);
                    }
                    else {
                        consumersPerTopicMap.get(topic).add(consumer);
                    }
                }
            }
            // Sort consumer ids so every group member computes the same assignment.
            for (final Map.Entry<String, List<String>> entry : consumersPerTopicMap.entrySet()) {
                Collections.sort(entry.getValue());
            }
            return consumersPerTopicMap;
        }
public Map<String, String> getConsumerPerTopic(final String consumerId) throws Exception {
final List<String> topics = this.getTopics(consumerId);
final Map<String/* topic */, String/* consumerId */> rt = new HashMap<String, String>();
for (final String topic : topics) {
rt.put(topic, consumerId);
}
return rt;
}
/**
* consumerIdȡĵtopicб
*
* @param consumerId
* @return
* @throws Exception
*/
protected List<String> getTopics(final String consumerId) throws Exception {
final String topicsString =
ZkUtils.readData(ConsumerZooKeeper.this.zkClient, this.dirs.consumerRegistryDir + "/" + consumerId);
final String[] topics = topicsString.split(",");
final List<String> rt = new ArrayList<String>(topics.length);
for (final String topic : topics) {
rt.add(topic);
}
return rt;
}
}
} | Synchronized rebalance
| metamorphosis-client/src/main/java/com/taobao/metamorphosis/client/consumer/ConsumerZooKeeper.java | Synchronized rebalance |
|
Java | apache-2.0 | db5597ed5301fb5f9f60ac3ac291cf6509793a82 | 0 | jarlehansen/springfox-loader | package com.github.springfox.loader;
import com.github.springfox.loader.plugins.LoaderOperationPlugin;
import com.github.springfox.loader.plugins.LoaderTagProvider;
import io.swagger.annotations.Extension;
import io.swagger.annotations.ExtensionProperty;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.EmbeddedValueResolverAware;
import org.springframework.context.annotation.*;
import org.springframework.util.StringUtils;
import org.springframework.util.StringValueResolver;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import springfox.documentation.RequestHandler;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.ObjectVendorExtension;
import springfox.documentation.service.StringVendorExtension;
import springfox.documentation.service.VendorExtension;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.ApiSelectorBuilder;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.spring.web.readers.operation.DefaultTagsProvider;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
@EnableConfigurationProperties
@Configuration
@ComponentScan(basePackageClasses = SpringfoxLoaderConfig.class)
public class SpringfoxLoaderConfig extends WebMvcConfigurerAdapter implements ApplicationContextAware, EmbeddedValueResolverAware {
private SpringfoxLoader springfoxLoader = new SpringfoxLoader();
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
springfoxLoader.setApplicationContext(applicationContext);
}
@Override
public void setEmbeddedValueResolver(StringValueResolver stringValueResolver) {
springfoxLoader.setStringValueResolver(stringValueResolver);
}
@Autowired
public void setSpringfoxLoaderProps(SpringfoxLoaderProps loaderProps) {
springfoxLoader.setSpringfoxLoaderProps(loaderProps);
}
@Bean
@Conditional(ActiveProfilesCondition.class)
public Docket api() {
ApiSelectorBuilder apiSelectorBuilder = new Docket(DocumentationType.SWAGGER_2).select();
Predicate<RequestHandler> predicate = RequestHandlerSelectors.basePackage(springfoxLoader.getBasePackage())::apply;
if (springfoxLoader.includeControllers().length > 0) {
Class<?>[] controllers = springfoxLoader.includeControllers();
for (Class<?> controller : controllers) {
Predicate<RequestHandler> includeControllerRequestHandler = RequestHandlerSelectors.basePackage(controller.getPackage().getName())::apply;
predicate = predicate.or(includeControllerRequestHandler);
}
}
apiSelectorBuilder.apis(predicate::test);
apiSelectorBuilder.paths(PathSelectors.any()).build().apiInfo(apiInfo()).pathMapping(springfoxLoader.getPath());
return apiSelectorBuilder.build();
}
private ApiInfo apiInfo() {
return new ApiInfo(springfoxLoader.getTitle(), springfoxLoader.getDescription(), springfoxLoader.getVersion(),
springfoxLoader.getTermsOfServiceUrl(), springfoxLoader.getContact(), springfoxLoader.getLicense(), springfoxLoader.getLicenseUrl(), getVendorExtensions());
}
private List<VendorExtension> getVendorExtensions() {
Extension[] extensions = springfoxLoader.extensions();
if (extensions.length == 1 && StringUtils.isEmpty(extensions[0].name())) {
return Collections.emptyList();
}
return Arrays.stream(extensions).map(extension -> {
ExtensionProperty[] extensionProperties = extension.properties();
List<StringVendorExtension> vendorExtensions = Arrays.stream(extensionProperties)
.map(property -> new StringVendorExtension(property.name(), property.value())).collect(Collectors.toList());
ObjectVendorExtension vendorExtension = new ObjectVendorExtension(extension.name());
vendorExtensions.forEach(vendorExtension::addProperty);
return vendorExtension;
}).collect(Collectors.toList());
}
@Bean
@Primary
@Conditional(ActiveProfilesCondition.class)
public DefaultTagsProvider loaderDefaultTagsProvider() {
return new LoaderTagProvider(springfoxLoader.conventionMode());
}
// Registers the loader's operation plugin, passing along whether convention
// mode is enabled; guarded by the same profile condition as the other beans.
@Bean
@Conditional(ActiveProfilesCondition.class)
public LoaderOperationPlugin loaderOperationPlugin() {
return new LoaderOperationPlugin(springfoxLoader.conventionMode());
}
// When a custom Swagger UI base path is configured, register redirects from
// the springfox endpoints under that base path to their canonical locations.
@Override
public void addViewControllers(ViewControllerRegistry registry) {
if (!StringUtils.isEmpty(springfoxLoader.swaggerUiBasePath())) {
registry.addRedirectViewController(resourcePath("/v2/api-docs"), "/v2/api-docs");
registry.addRedirectViewController(resourcePath("/swagger-resources/configuration/ui"), "/swagger-resources/configuration/ui");
registry.addRedirectViewController(resourcePath("/swagger-resources/configuration/security"), "/swagger-resources/configuration/security");
registry.addRedirectViewController(resourcePath("/swagger-resources"), "/swagger-resources");
}
}
// Serves the swagger-ui page and its webjar resources under the configured
// base path so the UI stays reachable when not mounted at the context root.
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
if (!StringUtils.isEmpty(springfoxLoader.swaggerUiBasePath())) {
registry.addResourceHandler(resourcePath("/swagger-ui.html**")).addResourceLocations("classpath:/META-INF/resources/swagger-ui.html");
registry.addResourceHandler(resourcePath("/webjars/**")).addResourceLocations("classpath:/META-INF/resources/webjars/");
}
}
/**
 * Prefixes the given relative path with the configured Swagger UI base path.
 */
private String resourcePath(String path) {
    final String basePath = springfoxLoader.swaggerUiBasePath();
    return basePath + path;
}
}
| src/main/java/com/github/springfox/loader/SpringfoxLoaderConfig.java | package com.github.springfox.loader;
import com.github.springfox.loader.plugins.LoaderOperationPlugin;
import com.github.springfox.loader.plugins.LoaderTagProvider;
import io.swagger.annotations.Extension;
import io.swagger.annotations.ExtensionProperty;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.EmbeddedValueResolverAware;
import org.springframework.context.annotation.*;
import org.springframework.util.StringUtils;
import org.springframework.util.StringValueResolver;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import springfox.documentation.RequestHandler;
import springfox.documentation.builders.PathSelectors;
import springfox.documentation.builders.RequestHandlerSelectors;
import springfox.documentation.service.ApiInfo;
import springfox.documentation.service.ObjectVendorExtension;
import springfox.documentation.service.StringVendorExtension;
import springfox.documentation.service.VendorExtension;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spring.web.plugins.ApiSelectorBuilder;
import springfox.documentation.spring.web.plugins.Docket;
import springfox.documentation.spring.web.readers.operation.DefaultTagsProvider;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
 * Spring {@link Configuration} that wires up springfox/Swagger documentation
 * from the properties and annotations collected by {@link SpringfoxLoader}.
 *
 * <p>All beans are guarded by {@link ActiveProfilesCondition}, so the Swagger
 * endpoints are only exposed for the configured profiles.
 */
@EnableConfigurationProperties
@Configuration
@ComponentScan(basePackageClasses = SpringfoxLoaderConfig.class)
public class SpringfoxLoaderConfig extends WebMvcConfigurerAdapter implements ApplicationContextAware, EmbeddedValueResolverAware {

    /** Facade over the application context, value resolver and loader properties. */
    private SpringfoxLoader springfoxLoader = new SpringfoxLoader();

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        springfoxLoader.setApplicationContext(applicationContext);
    }

    @Override
    public void setEmbeddedValueResolver(StringValueResolver stringValueResolver) {
        springfoxLoader.setStringValueResolver(stringValueResolver);
    }

    @Autowired
    public void setSpringfoxLoaderProps(SpringfoxLoaderProps loaderProps) {
        springfoxLoader.setSpringfoxLoaderProps(loaderProps);
    }

    /**
     * Builds the springfox {@link Docket}: selects request handlers from the
     * base package plus any explicitly included controller packages, accepts
     * all paths, and applies the API info block and path mapping.
     */
    @Bean
    @Conditional(ActiveProfilesCondition.class)
    public Docket api() {
        ApiSelectorBuilder apiSelectorBuilder = new Docket(DocumentationType.SWAGGER_2).select();
        Predicate<RequestHandler> predicate = RequestHandlerSelectors.basePackage(springfoxLoader.getBasePackage())::apply;
        if (springfoxLoader.includeControllers().length > 0) {
            Class<?>[] controllers = springfoxLoader.includeControllers();
            for (Class<?> controller : controllers) {
                // Widen the selector so handlers from each included controller's package match too.
                Predicate<RequestHandler> includeControllerRequestHandler = RequestHandlerSelectors.basePackage(controller.getPackage().getName())::apply;
                predicate = predicate.or(includeControllerRequestHandler);
            }
        }
        apiSelectorBuilder.apis(predicate::test);
        apiSelectorBuilder.paths(PathSelectors.any()).build().apiInfo(apiInfo()).pathMapping(springfoxLoader.getPath());
        return apiSelectorBuilder.build();
    }

    private ApiInfo apiInfo() {
        return new ApiInfo(springfoxLoader.getTitle(), springfoxLoader.getDescription(), springfoxLoader.getVersion(),
                springfoxLoader.getTermsOfServiceUrl(), springfoxLoader.getContact(), springfoxLoader.getLicense(), springfoxLoader.getLicenseUrl(), getVendorExtensions());
    }

    /**
     * Maps the swagger {@code @Extension} annotations to springfox vendor
     * extensions. A single extension with an empty name is the annotation
     * default and is treated as "none configured".
     */
    private List<VendorExtension> getVendorExtensions() {
        Extension[] extensions = springfoxLoader.extensions();
        if (extensions.length == 1 && StringUtils.isEmpty(extensions[0].name())) {
            return Collections.emptyList();
        }
        return Arrays.stream(extensions).map(extension -> {
            ExtensionProperty[] extensionProperties = extension.properties();
            List<StringVendorExtension> vendorExtensions = Arrays.stream(extensionProperties)
                    .map(property -> new StringVendorExtension(property.name(), property.value())).collect(Collectors.toList());
            ObjectVendorExtension vendorExtension = new ObjectVendorExtension(extension.name());
            // forEach + method reference instead of a manual for-loop.
            vendorExtensions.forEach(vendorExtension::addProperty);
            return vendorExtension;
        }).collect(Collectors.toList());
    }

    @Bean
    @Primary
    @Conditional(ActiveProfilesCondition.class)
    public DefaultTagsProvider loaderDefaultTagsProvider() {
        return new LoaderTagProvider(springfoxLoader.conventionMode());
    }

    @Bean
    @Conditional(ActiveProfilesCondition.class)
    public LoaderOperationPlugin loaderOperationPlugin() {
        return new LoaderOperationPlugin(springfoxLoader.conventionMode());
    }

    // Redirect the springfox endpoints served under a custom base path back
    // to their canonical locations.
    @Override
    public void addViewControllers(ViewControllerRegistry registry) {
        if (!StringUtils.isEmpty(springfoxLoader.swaggerUiBasePath())) {
            registry.addRedirectViewController(resourcePath("/v2/api-docs"), "/v2/api-docs");
            registry.addRedirectViewController(resourcePath("/swagger-resources/configuration/ui"), "/swagger-resources/configuration/ui");
            registry.addRedirectViewController(resourcePath("/swagger-resources/configuration/security"), "/swagger-resources/configuration/security");
            registry.addRedirectViewController(resourcePath("/swagger-resources"), "/swagger-resources");
        }
    }

    // Expose the swagger-ui page and webjar resources under the configured base path.
    @Override
    public void addResourceHandlers(ResourceHandlerRegistry registry) {
        if (!StringUtils.isEmpty(springfoxLoader.swaggerUiBasePath())) {
            registry.addResourceHandler(resourcePath("/swagger-ui.html**")).addResourceLocations("classpath:/META-INF/resources/swagger-ui.html");
            registry.addResourceHandler(resourcePath("/webjars/**")).addResourceLocations("classpath:/META-INF/resources/webjars/");
        }
    }

    /** Prefixes {@code path} with the configured Swagger UI base path. */
    private String resourcePath(String path) {
        return springfoxLoader.swaggerUiBasePath() + path;
    }
}
| small code cleanup in getVendorExtensions()
| src/main/java/com/github/springfox/loader/SpringfoxLoaderConfig.java | small code cleanup in getVendorExtensions() |
|
Java | apache-2.0 | 4b25a12f907d120b388ed0a633b6fe48909f5643 | 0 | lsmaira/gradle,gstevey/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,lsmaira/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,gstevey/gradle,robinverduijn/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,gstevey/gradle,lsmaira/gradle,gstevey/gradle,gstevey/gradle,lsmaira/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,gradle/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,lsmaira/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.dsl;
import groovy.lang.Closure;
import org.apache.ivy.plugins.resolver.DependencyResolver;
import org.gradle.api.Action;
import org.gradle.api.artifacts.dsl.RepositoryHandler;
import org.gradle.api.artifacts.repositories.FlatDirectoryArtifactRepository;
import org.gradle.api.artifacts.repositories.IvyArtifactRepository;
import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
import org.gradle.api.internal.ClosureBackedAction;
import org.gradle.api.internal.ConfigureByMapAction;
import org.gradle.api.internal.artifacts.BaseRepositoryFactory;
import org.gradle.api.internal.artifacts.DefaultArtifactRepositoryContainer;
import org.gradle.api.internal.artifacts.configurations.ResolverProvider;
import org.gradle.api.internal.artifacts.repositories.FixedResolverArtifactRepository;
import org.gradle.internal.reflect.Instantiator;
import org.gradle.util.ConfigureUtil;
import org.gradle.util.DeprecationLogger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.gradle.util.CollectionUtils.flattenToList;
/**
 * Default implementation of {@link RepositoryHandler}: the build-script DSL
 * entry point for declaring artifact repositories (flatDir, mavenCentral,
 * mavenLocal, maven, mavenRepo, ivy).
 *
 * @author Hans Dockter
 */
public class DefaultRepositoryHandler extends DefaultArtifactRepositoryContainer implements RepositoryHandler, ResolverProvider {
// Default names assigned to repositories added without an explicit name.
public static final String FLAT_DIR_DEFAULT_NAME = "flatDir";
private static final String MAVEN_REPO_DEFAULT_NAME = "maven";
private static final String IVY_REPO_DEFAULT_NAME = "ivy";
// Creates the concrete repository instances added by the DSL methods below.
private final BaseRepositoryFactory repositoryFactory;
public DefaultRepositoryHandler(BaseRepositoryFactory repositoryFactory, Instantiator instantiator) {
super(repositoryFactory, instantiator);
this.repositoryFactory = repositoryFactory;
}
// Adds a flat-directory repository configured by the given action.
public FlatDirectoryArtifactRepository flatDir(Action<? super FlatDirectoryArtifactRepository> action) {
return addRepository(repositoryFactory.createFlatDirRepository(), FLAT_DIR_DEFAULT_NAME, action);
}
public FlatDirectoryArtifactRepository flatDir(Closure configureClosure) {
return flatDir(new ClosureBackedAction<FlatDirectoryArtifactRepository>(configureClosure));
}
// Map form of flatDir; 'dirs' may be a single value or a nested collection
// and is flattened to a plain list before configure-by-map is applied.
public FlatDirectoryArtifactRepository flatDir(Map<String, ?> args) {
Map<String, Object> modifiedArgs = new HashMap<String, Object>(args);
if (modifiedArgs.containsKey("dirs")) {
modifiedArgs.put("dirs", flattenToList(modifiedArgs.get("dirs")));
}
return flatDir(new ConfigureByMapAction<FlatDirectoryArtifactRepository>(modifiedArgs));
}
public MavenArtifactRepository mavenCentral() {
return addRepository(repositoryFactory.createMavenCentralRepository(), DEFAULT_MAVEN_CENTRAL_REPO_NAME);
}
// Map form of mavenCentral; the deprecated 'urls' property is rewritten to
// 'artifactUrls' and a deprecation warning is emitted.
public MavenArtifactRepository mavenCentral(Map<String, ?> args) {
Map<String, Object> modifiedArgs = new HashMap<String, Object>(args);
if (modifiedArgs.containsKey("urls")) {
DeprecationLogger.nagUserOfDeprecated(
"The 'urls' property of the RepositoryHandler.mavenCentral() method",
"You should use the 'artifactUrls' property to define additional artifact locations"
);
List<?> urls = flattenToList(modifiedArgs.remove("urls"));
modifiedArgs.put("artifactUrls", urls);
}
return addRepository(repositoryFactory.createMavenCentralRepository(), DEFAULT_MAVEN_CENTRAL_REPO_NAME, new ConfigureByMapAction<MavenArtifactRepository>(modifiedArgs));
}
public MavenArtifactRepository mavenLocal() {
return addRepository(repositoryFactory.createMavenLocalRepository(), DEFAULT_MAVEN_LOCAL_REPO_NAME);
}
public DependencyResolver mavenRepo(Map<String, ?> args) {
return mavenRepo(args, null);
}
// Legacy maven repository support: the first entry of the deprecated 'urls'
// list becomes 'url', the remainder become 'artifactUrls'. Returns the
// underlying Ivy DependencyResolver after applying the optional closure.
public DependencyResolver mavenRepo(Map<String, ?> args, Closure configClosure) {
Map<String, Object> modifiedArgs = new HashMap<String, Object>(args);
if (modifiedArgs.containsKey("urls")) {
List<?> urls = flattenToList(modifiedArgs.remove("urls"));
if (!urls.isEmpty()) {
DeprecationLogger.nagUserOfDeprecated(
"The 'urls' property of the RepositoryHandler.mavenRepo() method",
"You should use the 'url' property to define the core maven repository & the 'artifactUrls' property to define any additional artifact locations"
);
modifiedArgs.put("url", urls.get(0));
List<?> extraUrls = urls.subList(1, urls.size());
modifiedArgs.put("artifactUrls", extraUrls);
}
}
MavenArtifactRepository repository = repositoryFactory.createMavenRepository();
ConfigureUtil.configureByMap(modifiedArgs, repository);
DependencyResolver resolver = repositoryFactory.toResolver(repository);
ConfigureUtil.configure(configClosure, resolver);
addRepository(new FixedResolverArtifactRepository(resolver), "mavenRepo");
return resolver;
}
public MavenArtifactRepository maven(Action<? super MavenArtifactRepository> action) {
return addRepository(repositoryFactory.createMavenRepository(), MAVEN_REPO_DEFAULT_NAME, action);
}
public MavenArtifactRepository maven(Closure closure) {
return maven(new ClosureBackedAction<MavenArtifactRepository>(closure));
}
public IvyArtifactRepository ivy(Action<? super IvyArtifactRepository> action) {
return addRepository(repositoryFactory.createIvyRepository(), IVY_REPO_DEFAULT_NAME, action);
}
public IvyArtifactRepository ivy(Closure closure) {
return ivy(new ClosureBackedAction<IvyArtifactRepository>(closure));
}
}
| subprojects/core/src/main/groovy/org/gradle/api/internal/artifacts/dsl/DefaultRepositoryHandler.java | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.dsl;
import groovy.lang.Closure;
import org.apache.ivy.plugins.resolver.DependencyResolver;
import org.gradle.api.Action;
import org.gradle.api.artifacts.dsl.RepositoryHandler;
import org.gradle.api.artifacts.repositories.FlatDirectoryArtifactRepository;
import org.gradle.api.artifacts.repositories.IvyArtifactRepository;
import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
import org.gradle.api.internal.ClosureBackedAction;
import org.gradle.api.internal.ConfigureByMapAction;
import org.gradle.api.internal.artifacts.BaseRepositoryFactory;
import org.gradle.api.internal.artifacts.DefaultArtifactRepositoryContainer;
import org.gradle.api.internal.artifacts.configurations.ResolverProvider;
import org.gradle.api.internal.artifacts.repositories.FixedResolverArtifactRepository;
import org.gradle.internal.reflect.Instantiator;
import org.gradle.util.ConfigureUtil;
import org.gradle.util.DeprecationLogger;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.gradle.util.CollectionUtils.flattenToList;
/**
 * Default implementation of {@link RepositoryHandler}: the build-script DSL
 * entry point for declaring artifact repositories (flatDir, mavenCentral,
 * mavenLocal, maven, mavenRepo, ivy).
 *
 * @author Hans Dockter
 */
public class DefaultRepositoryHandler extends DefaultArtifactRepositoryContainer implements RepositoryHandler, ResolverProvider {
    public static final String FLAT_DIR_DEFAULT_NAME = "flatDir";
    private static final String MAVEN_REPO_DEFAULT_NAME = "maven";
    private static final String IVY_REPO_DEFAULT_NAME = "ivy";

    /** Creates the concrete repository instances added by the DSL methods below. */
    private final BaseRepositoryFactory repositoryFactory;

    public DefaultRepositoryHandler(BaseRepositoryFactory repositoryFactory, Instantiator instantiator) {
        super(repositoryFactory, instantiator);
        this.repositoryFactory = repositoryFactory;
    }

    public FlatDirectoryArtifactRepository flatDir(Action<? super FlatDirectoryArtifactRepository> action) {
        return addRepository(repositoryFactory.createFlatDirRepository(), FLAT_DIR_DEFAULT_NAME, action);
    }

    public FlatDirectoryArtifactRepository flatDir(Closure configureClosure) {
        return flatDir(new ClosureBackedAction<FlatDirectoryArtifactRepository>(configureClosure));
    }

    /**
     * Map form of flatDir. The 'dirs' entry may be passed as a single value
     * or as an arbitrarily nested collection; flatten it to a plain list so
     * configure-by-map always receives a collection for that property (the
     * same normalization applied to 'urls' in the maven methods below).
     */
    public FlatDirectoryArtifactRepository flatDir(Map<String, ?> args) {
        Map<String, Object> modifiedArgs = new HashMap<String, Object>(args);
        if (modifiedArgs.containsKey("dirs")) {
            modifiedArgs.put("dirs", flattenToList(modifiedArgs.get("dirs")));
        }
        return flatDir(new ConfigureByMapAction<FlatDirectoryArtifactRepository>(modifiedArgs));
    }

    public MavenArtifactRepository mavenCentral() {
        return addRepository(repositoryFactory.createMavenCentralRepository(), DEFAULT_MAVEN_CENTRAL_REPO_NAME);
    }

    /**
     * Map form of mavenCentral. The deprecated 'urls' property is rewritten
     * to 'artifactUrls' and a deprecation warning is emitted.
     */
    public MavenArtifactRepository mavenCentral(Map<String, ?> args) {
        Map<String, Object> modifiedArgs = new HashMap<String, Object>(args);
        if (modifiedArgs.containsKey("urls")) {
            DeprecationLogger.nagUserOfDeprecated(
                    "The 'urls' property of the RepositoryHandler.mavenCentral() method",
                    "You should use the 'artifactUrls' property to define additional artifact locations"
            );
            List<?> urls = flattenToList(modifiedArgs.remove("urls"));
            modifiedArgs.put("artifactUrls", urls);
        }
        return addRepository(repositoryFactory.createMavenCentralRepository(), DEFAULT_MAVEN_CENTRAL_REPO_NAME, new ConfigureByMapAction<MavenArtifactRepository>(modifiedArgs));
    }

    public MavenArtifactRepository mavenLocal() {
        return addRepository(repositoryFactory.createMavenLocalRepository(), DEFAULT_MAVEN_LOCAL_REPO_NAME);
    }

    public DependencyResolver mavenRepo(Map<String, ?> args) {
        return mavenRepo(args, null);
    }

    /**
     * Legacy maven repository support. The first entry of the deprecated
     * 'urls' list becomes 'url'; the remainder become 'artifactUrls'.
     * Returns the underlying Ivy DependencyResolver after applying the
     * optional configuration closure.
     */
    public DependencyResolver mavenRepo(Map<String, ?> args, Closure configClosure) {
        Map<String, Object> modifiedArgs = new HashMap<String, Object>(args);
        if (modifiedArgs.containsKey("urls")) {
            List<?> urls = flattenToList(modifiedArgs.remove("urls"));
            if (!urls.isEmpty()) {
                DeprecationLogger.nagUserOfDeprecated(
                        "The 'urls' property of the RepositoryHandler.mavenRepo() method",
                        "You should use the 'url' property to define the core maven repository & the 'artifactUrls' property to define any additional artifact locations"
                );
                modifiedArgs.put("url", urls.get(0));
                List<?> extraUrls = urls.subList(1, urls.size());
                modifiedArgs.put("artifactUrls", extraUrls);
            }
        }
        MavenArtifactRepository repository = repositoryFactory.createMavenRepository();
        ConfigureUtil.configureByMap(modifiedArgs, repository);
        DependencyResolver resolver = repositoryFactory.toResolver(repository);
        ConfigureUtil.configure(configClosure, resolver);
        addRepository(new FixedResolverArtifactRepository(resolver), resolver.getName());
        return resolver;
    }

    public MavenArtifactRepository maven(Action<? super MavenArtifactRepository> action) {
        return addRepository(repositoryFactory.createMavenRepository(), MAVEN_REPO_DEFAULT_NAME, action);
    }

    public MavenArtifactRepository maven(Closure closure) {
        return maven(new ClosureBackedAction<MavenArtifactRepository>(closure));
    }

    public IvyArtifactRepository ivy(Action<? super IvyArtifactRepository> action) {
        return addRepository(repositoryFactory.createIvyRepository(), IVY_REPO_DEFAULT_NAME, action);
    }

    public IvyArtifactRepository ivy(Closure closure) {
        return ivy(new ClosureBackedAction<IvyArtifactRepository>(closure));
    }
}
| Reinstate special handling of 'dirs' property when configuring flat dir repo.
We should probably move this handling of converting single values into collection types more globally for all configure by map operations.
| subprojects/core/src/main/groovy/org/gradle/api/internal/artifacts/dsl/DefaultRepositoryHandler.java | Reinstate special handling of 'dirs' property when configuring flat dir repo. |
|
Java | apache-2.0 | 2067bc88816de12b0281c199c6bc39b954935f28 | 0 | pluto-build/pluto | package build.pluto.test.build;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.sugarj.common.FileCommands;
import build.pluto.builder.BuildCycleException;
import build.pluto.builder.BuildManager;
import build.pluto.builder.BuildRequest;
import build.pluto.builder.Builder;
import build.pluto.builder.BuilderFactory;
import build.pluto.builder.RequiredBuilderFailed;
import build.pluto.dependency.BuildRequirement;
import build.pluto.stamp.FileHashStamper;
import build.pluto.stamp.Stamper;
import build.pluto.test.EmptyBuildOutput;
import build.pluto.util.AbsoluteComparedFile;
// Verifies that the build manager detects the dependency cycle produced by
// builders that require each other in a ring (Test0 -> Test1 -> ... -> Test9 -> Test0).
public class BuildManagerCycleDetectionTest {
private static File baseDir = new File("testdata/CycleDetectionTest/");
// Start every test from a freshly created, empty working directory.
@Before
public void emptyDir() throws IOException{
FileCommands.delete(baseDir.toPath());
FileCommands.createDir(baseDir.toPath());
}
public static final BuilderFactory<File, EmptyBuildOutput, TestBuilder> testFactory = new BuilderFactory<File, EmptyBuildOutput, TestBuilder>() {
private static final long serialVersionUID = 3231801709410953205L;
@Override
public TestBuilder makeBuilder(File input) {
return new TestBuilder(input);
}
};
// Builder whose build step requires Test<n+1>.txt when run on Test<n>.txt,
// wrapping 9 back to 0 -- i.e. a 10-element build cycle.
private static class TestBuilder extends Builder<File, EmptyBuildOutput> {
private TestBuilder(File input) {
super(input);
}
@Override
protected String description() {
return "Test Builder " + input.getAbsolutePath();
}
@Override
protected File persistentPath() {
return FileCommands.replaceExtension(input.toPath(),"dep").toFile();
}
@Override
protected Stamper defaultStamper() {
return FileHashStamper.instance;
}
// Derives the successor file Test<n+1>.txt from the input's trailing digit
// and requires a build of it, closing the ring when n reaches 9.
@Override
protected EmptyBuildOutput build() throws IOException {
File req;
int number = 0;
String inputWithoutExt = FileCommands.dropExtension(input
.getAbsolutePath());
char lastInputChar = inputWithoutExt.charAt(inputWithoutExt
.length() - 1);
if (Character.isDigit(lastInputChar)) {
number = Integer.parseInt(new String(
new char[] { lastInputChar })) + 1;
} else {
fail("Invalid file");
}
if (number == 10) {
number = 0;
}
req = new File(inputWithoutExt.substring(0,
inputWithoutExt.length() - 1)
+ number + ".txt");
requireBuild(testFactory, req);
return EmptyBuildOutput.instance;
}
}
// Absolute files, so they compare equal to the paths the builder derives
// from input.getAbsolutePath() in build() and persistentPath().
private File getDepPathWithNumber(int num) {
return new File(baseDir, "Test" + num + ".dep").getAbsoluteFile();
}
private File getPathWithNumber(int num) {
return new File(baseDir, "Test" + num + ".txt").getAbsoluteFile();
}
@Test
public void testCyclesDetected() throws IOException {
// NOTE(review): if build() returns without throwing, no assertions run and
// the test passes vacuously -- consider fail()-ing after the build call.
try {
BuildManager
.build(new BuildRequest<File, EmptyBuildOutput, TestBuilder, BuilderFactory<File, EmptyBuildOutput, TestBuilder>>(
testFactory, getPathWithNumber(0)));
} catch (RequiredBuilderFailed e) {
assertTrue("Cause is not a cycle",
e.getCause() instanceof BuildCycleException);
BuildCycleException cycle = (BuildCycleException) e.getCause();
assertEquals("Wrong cause path", getDepPathWithNumber(0), cycle
.getCycleCause().getPersistentPath());
List<BuildRequirement<?>> cyclicUnits = cycle.getCycleComponents();
assertEquals("Wrong number of units in cycle", 10,
cyclicUnits.size());
// Every one of the ten units must appear in the reported cycle with the
// expected persistent path, factory and input.
for (int i = 0; i < 10; i++) {
BuildRequirement<?> requirement = null;
for (BuildRequirement<?> req : cyclicUnits) {
if (AbsoluteComparedFile.equals(req.getUnit().getPersistentPath(),
getDepPathWithNumber(i))) {
requirement = req;
}
}
assertTrue("No requirement for " + i, requirement != null);
assertTrue(requirement.getUnit() != null);
assertTrue("Wrong persistence path for unit", AbsoluteComparedFile.equals(
getDepPathWithNumber(i),
requirement.getUnit().getPersistentPath()));
assertEquals("Wrong factory for unit", testFactory,
requirement.getRequest().factory);
assertTrue("Wrong input for unit", AbsoluteComparedFile.equals(getPathWithNumber(i),
(File) requirement.getRequest().input));
}
}
}
}
| pluto/test/build/pluto/test/build/BuildManagerCycleDetectionTest.java | package build.pluto.test.build;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.sugarj.common.FileCommands;
import build.pluto.builder.BuildCycleException;
import build.pluto.builder.BuildManager;
import build.pluto.builder.BuildRequest;
import build.pluto.builder.Builder;
import build.pluto.builder.BuilderFactory;
import build.pluto.builder.RequiredBuilderFailed;
import build.pluto.dependency.BuildRequirement;
import build.pluto.stamp.FileHashStamper;
import build.pluto.stamp.Stamper;
import build.pluto.test.EmptyBuildOutput;
import build.pluto.util.AbsoluteComparedFile;
/**
 * Verifies that the build manager detects the dependency cycle produced by
 * builders that require each other in a ring (Test0 -> Test1 -> ... -> Test9 -> Test0).
 */
public class BuildManagerCycleDetectionTest {

    private static File baseDir = new File("testdata/CycleDetectionTest/");

    /** Start every test from a freshly created, empty working directory. */
    @Before
    public void emptyDir() throws IOException {
        FileCommands.delete(baseDir.toPath());
        FileCommands.createDir(baseDir.toPath());
    }

    public static final BuilderFactory<File, EmptyBuildOutput, TestBuilder> testFactory = new BuilderFactory<File, EmptyBuildOutput, TestBuilder>() {
        private static final long serialVersionUID = 3231801709410953205L;

        @Override
        public TestBuilder makeBuilder(File input) {
            return new TestBuilder(input);
        }
    };

    /**
     * Builder whose build step requires Test&lt;n+1&gt;.txt when run on
     * Test&lt;n&gt;.txt, wrapping 9 back to 0 — i.e. a 10-element build cycle.
     */
    private static class TestBuilder extends Builder<File, EmptyBuildOutput> {

        private TestBuilder(File input) {
            super(input);
        }

        @Override
        protected String description() {
            return "Test Builder " + input.getAbsolutePath();
        }

        @Override
        protected File persistentPath() {
            return FileCommands.replaceExtension(input.toPath(), "dep").toFile();
        }

        @Override
        protected Stamper defaultStamper() {
            return FileHashStamper.instance;
        }

        @Override
        protected EmptyBuildOutput build() throws IOException {
            // Derive the successor file Test<n+1>.txt from this input's trailing digit.
            int number = 0;
            String inputWithoutExt = FileCommands.dropExtension(input.getAbsolutePath());
            char lastInputChar = inputWithoutExt.charAt(inputWithoutExt.length() - 1);
            if (Character.isDigit(lastInputChar)) {
                number = Integer.parseInt(new String(new char[] { lastInputChar })) + 1;
            } else {
                fail("Invalid file");
            }
            if (number == 10) {
                number = 0; // close the ring: Test9 requires Test0 again
            }
            File req = new File(inputWithoutExt.substring(0, inputWithoutExt.length() - 1) + number + ".txt");
            requireBuild(testFactory, req);
            return EmptyBuildOutput.instance;
        }
    }

    // Return absolute files: the builder derives required inputs and unit
    // persistent paths from input.getAbsolutePath(), so comparing against
    // relative files here would mix relative and absolute representations
    // and break the File-equality assertions below.
    private File getDepPathWithNumber(int num) {
        return new File(baseDir, "Test" + num + ".dep").getAbsoluteFile();
    }

    private File getPathWithNumber(int num) {
        return new File(baseDir, "Test" + num + ".txt").getAbsoluteFile();
    }

    @Test
    public void testCyclesDetected() throws IOException {
        try {
            BuildManager
                    .build(new BuildRequest<File, EmptyBuildOutput, TestBuilder, BuilderFactory<File, EmptyBuildOutput, TestBuilder>>(
                            testFactory, getPathWithNumber(0)));
        } catch (RequiredBuilderFailed e) {
            assertTrue("Cause is not a cycle",
                    e.getCause() instanceof BuildCycleException);
            BuildCycleException cycle = (BuildCycleException) e.getCause();
            assertEquals("Wrong cause path", getDepPathWithNumber(0), cycle
                    .getCycleCause().getPersistentPath());
            List<BuildRequirement<?>> cyclicUnits = cycle.getCycleComponents();
            assertEquals("Wrong number of units in cycle", 10,
                    cyclicUnits.size());
            // Every one of the ten units must appear in the reported cycle
            // with the expected persistent path, factory and input.
            for (int i = 0; i < 10; i++) {
                BuildRequirement<?> requirement = null;
                for (BuildRequirement<?> req : cyclicUnits) {
                    if (AbsoluteComparedFile.equals(req.getUnit().getPersistentPath(),
                            getDepPathWithNumber(i))) {
                        requirement = req;
                    }
                }
                assertTrue("No requirement for " + i, requirement != null);
                assertTrue(requirement.getUnit() != null);
                assertTrue("Wrong persistence path for unit", AbsoluteComparedFile.equals(
                        getDepPathWithNumber(i),
                        requirement.getUnit().getPersistentPath()));
                assertEquals("Wrong factory for unit", testFactory,
                        requirement.getRequest().factory);
                assertTrue("Wrong input for unit", AbsoluteComparedFile.equals(getPathWithNumber(i),
                        (File) requirement.getRequest().input));
            }
        }
    }
}
| fix cycle test, avoid mixture of relative and absolute paths
| pluto/test/build/pluto/test/build/BuildManagerCycleDetectionTest.java | fix cycle test, avoid mixture of relative and absolute paths |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.